@accounts.route('/els', methods=['GET'])
def els():
    from elasticsearch_dsl import Search
    import json

    # Phrase-match either "birileri var" or "şebnem ferah" in `content`,
    # restricted to documents published after 2023-02-28 21:00 UTC.
    query = {
        "query": {
            "bool": {
                "should": [
                    {"match_phrase": {"content": "birileri var"}},
                    {"match_phrase": {"content": "şebnem ferah"}}
                ],
                "minimum_should_match": 1,
                "filter": [
                    {
                        "range": {
                            "publish_date": {
                                "gte": "2023-02-28T21:00:00+00:00"
                            }
                        }
                    }
                ]
            }
        },
        # Note: Elasticsearch caps a single result window at
        # index.max_result_window (10000 by default).
        "size": 10000,
        "from": 0
    }

    s = Search(
        index='user_twitter,new_social_media_twitter,user_instagram,'
              'new_social_media_instagram,document_video',
        doc_type='doc'  # legacy mapping type; only relevant on pre-7.x clusters
    ).update_from_dict(query)
    result = s.execute()

    # Flatten each hit into a plain dict of the fields we care about.
    all_list = []
    for item in result:
        data = item.to_dict()
        all_list.append({
            'index': item.meta.index,
            'id': data.get('id'),
            'url': data.get('original_url'),
            'username': data.get('username'),
            'name': data.get('name'),
            'content': data.get('content'),
            'publish_date': data.get('publish_date'),
            'retweet': data.get('retweet'),
            'favorite': data.get('favorite'),
            'reply': data.get('reply'),
            'user_followings': data.get('user_followings'),
            'user_followers': data.get('user_followers'),
            'user_totalviews': data.get('user_totalviews'),
            'post_total_comments': data.get('post_total_comments'),
            'user_post_count': data.get('user_post_count'),
            'post_total_views': data.get('post_total_views'),
        })

    print(len(all_list))

    # Serialize the hits (ensure_ascii=False keeps Turkish characters readable),
    # save them to disk, and return the JSON as the response body so the view
    # does not return None.
    json_obj = json.dumps(all_list, indent=4, ensure_ascii=False)
    with open('els.json', 'w', encoding='utf-8') as f:
        f.write(json_obj)

    return json_obj
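

# The Search(...) call above relies on elasticsearch_dsl's default connection
# being registered before the route is hit. A minimal setup sketch, assuming a
# locally reachable cluster; the host URL and timeout below are placeholders
# (not taken from the original code) and would normally be configured once at
# application start-up.
from elasticsearch_dsl import connections

connections.create_connection(
    alias='default',                  # alias used implicitly by Search()
    hosts=['http://localhost:9200'],  # assumed host; replace with the real cluster URL
    timeout=30,                       # assumed request timeout in seconds
)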