#!/usr/bin/python3

import threading
import requests
import hashlib
import string
import random
import time
import json
import os
import re


CONFIG = {}


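# A minimal config.json sketch, inferred from the config() keys read throughout
# this file (the values shown are illustrative assumptions, not defaults):
#
#   {
#       "instance": "pixelfed.example.org",
#       "timeout_btw_accounts": 20,
#       "timeout_btw_posts": 5,
#       "max_video_size": 50
#   }

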
def add_igaccount(acc_id):
    # the user_create script must exist before running the API server
    if not os.path.exists('./scripts/user_create'):
        print('E| You may need to initialize the server environment first')
        return 1

    if not account_exists(acc_id):
        # get all profile data from the Instagram account
        data = getig_user_data(acc_id)

        # an empty response tells us the user probably doesn't exist (or there was a network error)
        if len(data.keys()) == 0:
            print('E| User "{}" does not exist on Instagram'.format(acc_id))
            return 2

        # we don't and can't mirror private profiles
        # (well, we could if we followed them and they followed back, but we just don't need that)
        if data['graphql']['user']['is_private']:
            print('E| User "{}" is a private account. We just won\'t mirror that!'.format(acc_id))
            return 3

        # get the account display name to create it
        name = getig_user_fullname(data)
        account = {
            'name': name,
            'username': acc_id,
            'password': random_string()
        }
        # we are completely sure the parameters can't contain unwanted characters,
        # so a shell exploit is not possible here :)
        os.system('./scripts/user_create \'{}\' \'{}\' \'{}\''.format(
            account['name'], account['username'], account['password']))

        # save the account login information for updates and mirroring
        db_set('accounts', acc_id, account)
        db_set('posts', acc_id, [])

        # set the Pixelfed account data for the username
        pixelfed_setpic(acc_id, data['graphql']['user']['profile_pic_url'])
        pixelfed_setinfo(acc_id, data['graphql']['user']['biography'],
            data['graphql']['user']['external_url'])
        update_igaccount_async(acc_id, False)
    else:
        print('W| User "{}" already exists in local database'.format(acc_id))

    return 0


def update_igaccount_async(acc_id, profileset=True):
    threading.Thread(target=update_igaccount, args=(acc_id, profileset,)).start()


def update_igaccount(acc_id, profileset=True):
    accdata = db_get('accounts', acc_id)

    # Some IG accounts post more often than others, so a simple "sched" counter
    # controls how often each account actually gets mirrored
    if 'sched' in accdata:
        sched_err_msg = 'E| User schedule is not configured correctly, for "{}": {}'.format(acc_id, accdata['sched'])
        if not re.match(r'^\d+$', accdata['sched']):
            print(sched_err_msg)
            return 1

        times = int(accdata['sched'])
        scnow = random.randint(1, times)
        if 'sched_now' in accdata:
            scnow = int(accdata['sched_now'])

        if scnow == times:
            scnow = 1
        else:
            scnow += 1

        accdata['sched_now'] = str(scnow)
        db_set('accounts', acc_id, accdata)
        if scnow != times:
            print('I| Skipping user "{}" according to configured schedule: {} of {}'.format(
                acc_id, scnow, times))
            return 1

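    # Worked sketch of the schedule counter above, assuming sched is set to '3'
    # (hypothetical value): sched_now cycles 1 -> 2 -> 3 -> 1 -> ..., and only the
    # run that brings it back to 3 (one out of every three calls) actually mirrors
    # the account; the other runs return early with the "Skipping user" message.
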
    # if the account does not exist, we stop the update process
    if not account_exists(acc_id):
        print('E| User "'+acc_id+'" has not been created yet, maybe you wanted to call /<username>/add ?')
        return 2

    data = getig_user_data(acc_id)
    if profileset:
        # update the full name of the user in the local DB
        accdata['name'] = getig_user_fullname(data)
        db_set('accounts', acc_id, accdata)

        # copy the profile information from IG to the Pixelfed account info
        pixelfed_setpic(acc_id, data['graphql']['user']['profile_pic_url'])
        pixelfed_setinfo(acc_id, data['graphql']['user']['biography'],
            data['graphql']['user']['external_url'])

    # synchronize posts (images/videos...)
    pixelfed_dlposts(acc_id, data['graphql']['user'])
    return 0


def update_allaccounts_async():
    threading.Thread(target=update_allaccounts).start()


def update_allaccounts():
    # update all accounts, waiting the configured timeout between them
    sleeptime = int(config()['timeout_btw_accounts'])
    accounts = os.listdir('./db/accounts')
    random.shuffle(accounts)
    i = 0
    for acc_id in accounts:
        i += 1
        print('I| mirroring account "{}"...'.format(acc_id))
        ret = update_igaccount(acc_id)
        if ret == 0:
            print('I| {} of {} completed. Waiting {} seconds'.format(i, len(accounts), sleeptime))
            time.sleep(sleeptime)
    print()
    print('I| done updating all accounts')


def delete_statuses(acc_id):
    accdata = db_get('accounts', acc_id)
    if not pixelfed_islogged(acc_id, accdata):
        print('E| user "{}" is not logged in. Please log in'.format(acc_id))
        return 1

    # check our credentials to validate that our account exists
    r = requests.get('https://'+config()['instance']+'/api/pixelfed/v1/accounts/verify_credentials', cookies=accdata['cookie'])
    if r.status_code != 200:
        print('E| user "{}" is not logged in. Please log in'.format(acc_id))
        return 2
    pixdata = json.loads(r.text)
    if not 'id' in pixdata:
        print('E| fatal! API is not working for "{}"! Might be a connectivity issue, or does the account not exist?'.format(acc_id))
        return 3

    _, _token = pixelfed_token_url('', accdata['cookie'])
    _headers = {
        'Content-Type': 'application/json',
        'X-Requested-With': 'XMLHttpRequest',
        'X-CSRF-TOKEN': _token,
        'X-XSRF-TOKEN': accdata['cookie']['XSRF-TOKEN']
    }

    # delete all statuses on Pixelfed by polling the next batch of items until none are left
    while True:
        r2 = requests.get('https://'+config()['instance']+'/api/pixelfed/v1/accounts/{}/statuses?min_id=1'.format(pixdata['id']),
            cookies=accdata['cookie'])
        jsdata = json.loads(r2.text)
        if not jsdata:
            break
        for status in jsdata:
            print('I| deleting status "{}" for account "{}"... '.format(status['id'], acc_id), end='')
            r3 = requests.post('https://'+config()['instance']+'/i/delete', json={'item': status['id'], 'type': 'status'},
                cookies=accdata['cookie'], headers=_headers)
            print(r3.status_code)

    db_set('posts', acc_id, [])
    print('I| done nuking account posts for "{}"'.format(acc_id))
    return 0


def pixelfed_logoutall_async():
    threading.Thread(target=pixelfed_logoutall).start()


def pixelfed_logoutall():
    for acc_id in os.listdir('./db/accounts'):
        print('I| logging out account "{}": '.format(acc_id), end='')
        if pixelfed_logout(acc_id):
            print('ok')
        else:
            print('not logged')
    print('I| done logging out all accounts\n')


def pixelfed_logout(acc_id):
    accdata = db_get('accounts', acc_id)
    if not pixelfed_islogged(acc_id, accdata):
        return False

    _, _token = pixelfed_token_url('', accdata['cookie'])
    r = requests.post('https://'+config()['instance']+'/logout', data={'_token': _token}, cookies=accdata['cookie'])

    del accdata['cookie']
    db_set('accounts', acc_id, accdata)
    return True


def pixelfed_loginall_async():
    threading.Thread(target=pixelfed_loginall).start()


def pixelfed_loginall():
    for acc_id in os.listdir('./db/accounts'):
        print('I| logging in account "{}": '.format(acc_id), end='')
        if pixelfed_login(acc_id):
            print('ok')
        else:
            print('already logged')
    print('I| done logging in all accounts\n')


def pixelfed_login(acc_id, force=False):
    # skip if the account is already logged in, unless "force" is set
    accdata = db_get('accounts', acc_id)
    if not force and pixelfed_islogged(acc_id, accdata):
        return False

    # obtain one-time tokens for the Pixelfed instance
    _cookies, _token = pixelfed_token_url()

    # do the login POST and retrieve the raw cookies; the rest of the API calls will use these cookies
    r = requests.post('https://'+config()['instance']+'/login',
        data={
            '_token': _token, 'email': 'pixelfed.'+acc_id+'@localhost',
            'password': accdata['password'], 'remember': 'on'
        },
        cookies=_cookies
    )

    # add the raw cookies to the account data for later calls
    accdata['cookie'] = dict(r.cookies)
    db_set('accounts', acc_id, accdata)
    return True


def pixelfed_islogged(acc_id, accdata=None):
    if accdata is None:
        accdata = db_get('accounts', acc_id)
    return 'cookie' in accdata


def pixelfed_token_url(url='', _cookies=None):
    r = requests.get('https://'+config()['instance']+url, cookies=_cookies)
    _token = re.search(r'name="_token".+value="([^"]+)"', r.text).group(1)
    return r.cookies, _token


def pixelfed_dlstories_async():
    threading.Thread(target=pixelfed_dlstories).start()


def pixelfed_dlstories():
    # get the reels_tray of the account (this is what stories are called internally)
    data = json.loads(instagram_get('https://i.instagram.com/api/v1/feed/reels_tray/', 1000000, {
        'Host': 'i.instagram.com',
    }))
    for item in data['tray']:
        # for now Pixelfed stories don't support videos:
        # https://github.com/pixelfed/pixelfed/issues/2169
        # so we will upload them as posts
        print(json.dumps(item, indent=4))


def pixelfed_dlposts(acc_id, data, is_story=False):
    ts = []
    items = []
    if not is_story:
        # collect image and video timestamps, then rebuild the items list in chronological order
        for edge in data['edge_owner_to_timeline_media']['edges']:
            ts.append(edge['node']['taken_at_timestamp'])
        for edge in data['edge_felix_video_timeline']['edges']:
            ts.append(edge['node']['taken_at_timestamp'])
        ts = sorted(ts)
        for t in ts:
            brkit = False
            for edge in data['edge_owner_to_timeline_media']['edges']:
                if edge['node']['taken_at_timestamp'] == t:
                    items.append(edge['node'])
                    brkit = True
                    break
            if brkit:
                continue
            for edge in data['edge_felix_video_timeline']['edges']:
                if edge['node']['taken_at_timestamp'] == t:
                    items.append(edge['node'])
                    break
    else:
        print(data)
        return

    # mirror posts from the account (only the last N, without loading more),
    # but only the ones that have not already been imported
    accposts = db_get('posts', acc_id)
    accdata = db_get('accounts', acc_id)

    for item in items:
        if item['shortcode'] in accposts:
            print('I| skipping IG post {}:{}. Already added'.format(acc_id, item['shortcode']))
            continue

        print('I| >>>> {}:{}'.format(acc_id, item['shortcode']))
        ig_url = 'https://www.instagram.com/p/{}/'.format(item['shortcode'])
        title = item['title'] if 'title' in item else None

        caption = item['edge_media_to_caption']['edges'][0]['node']['text'] \
            if len(item['edge_media_to_caption']['edges']) > 0 else ''
        altcaption = item['accessibility_caption'] if 'accessibility_caption' in item else ''
        altcaption = '' if altcaption is None else altcaption

        # add support for posts with multiple images:
        # fetch the post URL itself, since an IG post can contain more than one image
        postdata = json.loads(instagram_get('/p/{}/?__a=1'.format(item['shortcode']), 216000))
        multiple = False
        multmedia = None
        if len(postdata.keys()) > 0 and 'edge_sidecar_to_children' in postdata['graphql']['shortcode_media']:
            multiple = True
            multmedia = postdata['graphql']['shortcode_media']['edge_sidecar_to_children']['edges']

        _headers = {
            'Content-Type': 'application/json',
            'X-Requested-With': 'XMLHttpRequest',
            'X-XSRF-TOKEN': accdata['cookie']['XSRF-TOKEN']
        }
        jsdata_items = []
        _token = None
        failed = False

        if item['is_video']:
            # if the video is bigger than "max_video_size", we don't even try to upload it!
            r = requests.head(item['video_url'])
            size_in_mb = (int(r.headers['Content-Length']) / 1024 / 1024)
            if size_in_mb > config()['max_video_size']:
                # hack: we "add" it to posts so it isn't tried again!
                accposts.append(item['shortcode'])
                print('W| video exceeds the configured maximum of "{}MB"'.format(config()['max_video_size']))
                continue

            # try to upload to Pixelfed; it might fail depending on the MAX_PHOTO_SIZE setting
            print('I| fetching VIDEO for {}:{}... '.format(acc_id, item['shortcode']), end='')
            _token, jsdata = pixelfed_postvideo(acc_id, item['video_url'])
            if not jsdata:
                print('err')
                print('E| tried to upload a video of "{}MB" but failed. MAX_PHOTO_SIZE setting?'.format(int(size_in_mb)))
                print()
                continue
            jsdata_items.append(jsdata)
            print('done')

        else:
            print('I| uploading IMAGES for {}:{}... '.format(acc_id, item['shortcode']), end='')
            # multiple media items per post are supported here
            media2iterate = [a['node']['display_url'] for a in multmedia] if multiple else [item['display_url']]
            for media in media2iterate:
                _token, jsdata = pixelfed_postimage(acc_id, media, accdata)
                if not jsdata:
                    print('E| Could not upload media for {}:{}'.format(acc_id, item['shortcode']))
                    failed = True
                    break
                jsdata_items.append(jsdata)
            if failed:
                continue
            print('done')

        # add the accessibility captions
        i = 0
        _headers['X-CSRF-TOKEN'] = _token
        for jsdata in jsdata_items:
            jsdata['description'] = ig_url
            jsdata['cw'] = False
            jsdata['alt'] = ''
            if multiple:
                mnode = multmedia[i]['node']
                if 'accessibility_caption' in mnode and not mnode['accessibility_caption'] is None:
                    jsdata['alt'] = mnode['accessibility_caption'][0:136]+'...' \
                        if len(mnode['accessibility_caption']) > 140 else mnode['accessibility_caption']
            else:
                jsdata['alt'] = altcaption[0:136]+'...' if len(altcaption) > 140 else altcaption
            i += 1

        # publish the post using the Pixelfed API;
        # the post caption will be the original Instagram URL
        print('I| publishing post for {}:{}... '.format(acc_id, item['shortcode']), end='')
        r = requests.post('https://'+config()['instance']+'/api/compose/v0/publish',
            json={"media": jsdata_items, "caption": ig_url, "visibility": "public", "cw": False,
                "comments_disabled": False, "place": False, "tagged": [], "optimize_media": True},
            cookies=accdata['cookie'], headers=_headers
        )

        # post the real caption as numbered comments, since comments allow longer text
        if r.status_code == 200 and len(r.text) > 5:
            ps = r.text.strip('/').split('/')
            status_id = ps[len(ps)-1]
            print('done | StatusID -> {}'.format(status_id))

            print('I| publishing comments containing caption for {}:{}... '.format(acc_id, item['shortcode']), end='')
            i = 1
            failed = False
            for comment in [caption[i:i+495] for i in range(0, len(caption), 495)]:
                r2 = requests.post('https://'+config()['instance']+'/i/comment',
                    json={'comment': '('+str(i)+') '+comment, 'item': status_id, 'sensitive': False},
                    cookies=accdata['cookie'], headers=_headers
                )
                if not r2.status_code == 200:
                    failed = True
                    print('err. CODE -> {}'.format(r2.status_code))
                    print(r2.text)
                    break
                i += 1

            if not failed:
                print('done')
                accposts.append(item['shortcode'])
                print('I| uploaded post {}:{} : OK'.format(acc_id, item['shortcode']))
            time.sleep(int(config()['timeout_btw_posts']))
        else:
            print(r.text)
            print(r.headers)
            return

    print('I| done updating "{}" account'.format(acc_id))
    db_set('posts', acc_id, accposts)


# upload media and return data
def pixelfed_postimage(acc_id, image_url, accdata=None):
    return pixelfed_postmedia(acc_id, image_url, 'jpg', accdata)


def pixelfed_postvideo(acc_id, video_url, accdata=None):
    return pixelfed_postmedia(acc_id, video_url, 'mp4', accdata)


def pixelfed_postmedia(acc_id, url, ext, accdata=None):
    return _pixelfed_postmedia(acc_id, pixelfed_cachemedia(url, ext), accdata)


def _pixelfed_postmedia(acc_id, cachef, accdata=None):
    if accdata is None:
        accdata = db_get('accounts', acc_id)
    _, _token = pixelfed_token_url('', accdata['cookie'])
    r = requests.post('https://'+config()['instance']+'/api/compose/v0/media/upload',
        files={'file': open(cachef, 'rb')}, cookies=accdata['cookie'],
        headers={
            'X-CSRF-TOKEN': _token,
            'X-Requested-With': 'XMLHttpRequest',
            'X-XSRF-TOKEN': accdata['cookie']['XSRF-TOKEN']
        }
    )
    if r.status_code == 200:
        return _token, json.loads(r.text)
    return None, False


# get the image by URL but cache it forever: if the profile picture changes,
# the URL will be different, and therefore the checksum (cache filename) will also differ
def pixelfed_cacheimg(image_url):
    return pixelfed_cachemedia(image_url, 'jpg')


def pixelfed_cachevid(video_url):
    return pixelfed_cachemedia(video_url, 'mp4')


def pixelfed_cachemedia(url, ext='jpg'):
    cachef = './cache/{}.{}'.format(md5sum(url), ext)
    if not os.path.exists(cachef):
        r = requests.get(url)
        w = open(cachef, 'wb')
        w.write(r.content)
        w.close()
    return cachef


def pixelfed_setpic(acc_id, pic_url, count=0):
    count += 1
    pixelfed_login(acc_id)

    cachef = pixelfed_cacheimg(pic_url)
    accdata = db_get('accounts', acc_id)
    print('I| setting avatar for "{}" '.format(acc_id), end="")
    _, _token = pixelfed_token_url('/settings/home', accdata['cookie'])
    r = requests.post('https://'+config()['instance']+'/settings/avatar',
        data={'_token': _token}, cookies=accdata['cookie'], files={'avatar': open(cachef, 'rb')}
    )

    # try to log in again if the upload failed
    if r.status_code == 419 and count < 3:
        print('err (login required)')
        pixelfed_login(acc_id, True)
        return pixelfed_setpic(acc_id, pic_url, count)

    print('ok')

    return True


def pixelfed_setinfo(acc_id, bio, website, count=0):
    accdata = db_get('accounts', acc_id)

    name = accdata['name'] + ' [Mirror]'
    if count == 0:
        bio = 'Mirrored from Instagram: instagram.com/{} | {}'.format(acc_id, bio)

    count += 1
    pixelfed_login(acc_id)

    print('I| setting account-info for "{}" '.format(acc_id), end="")
    _, _token = pixelfed_token_url('/settings/home', accdata['cookie'])
    r = requests.post('https://'+config()['instance']+'/settings/home',
        data={
            '_token': _token, 'name': name,
            'website': website, 'bio': bio, 'language': 'en'
        },
        cookies=accdata['cookie']
    )

    # try to log in again if the update failed
    if r.status_code == 419 and count < 3:
        print('err (login required)')
        pixelfed_login(acc_id, True)
        return pixelfed_setinfo(acc_id, bio, website, count)

    print('ok')

    return True


def pixelfed_htmlfill_mirrors(html):
    accounts = os.listdir('./db/accounts')
    mirr_html = ''
    for acc_id in sorted(set(accounts)):
        accdata = db_get('accounts', acc_id)
        mirr_html += """
        <div class="item">
            <h3 class="name">{1}</h3>
            <b>@{0}</b>
            <div class="links">
                <div><a href="https://{2}/{0}">Pixelfed</a></div>
                <div><a href="https://www.instagram.com/{0}">Instagram</a></div>
            </div>
        </div>
        """.format(accdata['username'], htmlesc(accdata['name']),
            config()['instance'])
    html = html.replace('{mirrors}', mirr_html)
    html = html.replace('{item_count}', str(len(accounts)))
    html = html.replace('{instance}', config()['instance'])
    return html


def htmlesc(strr):
    return strr.replace('&', '&amp;')\
        .replace('<', '&lt;')\
        .replace('>', '&gt;')


def random_string(count=32):
    return ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=count))


def md5sum(_str):
    return hashlib.md5(_str.encode()).hexdigest()


# get all profile data from a user:
# - display name
# - bio description
# - shared posts (images/videos)
# - much more info...
def getig_user_data(acc_id):
    return json.loads(
        instagram_get('/{}/?__a=1'.format(acc_id), 1800)
    )


def getig_user_fullname(data):
    if data is None:
        return '.ERROR.'
    return re.sub(r'[^a-zA-Z0-9_\s]', '',
        data['graphql']['user']['full_name'])


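# Rough shape of the ?__a=1 profile JSON returned via getig_user_data() above.
# Only the keys this file actually reads are listed; this is an inferred sketch
# with illustrative values, not a full schema:
#
#   {"graphql": {"user": {
#       "is_private": false, "full_name": "...", "profile_pic_url": "...",
#       "biography": "...", "external_url": "...",
#       "edge_owner_to_timeline_media": {"edges": [{"node": {
#           "shortcode": "...", "taken_at_timestamp": 0, "is_video": false,
#           "display_url": "...", "accessibility_caption": "...",
#           "edge_media_to_caption": {"edges": [{"node": {"text": "..."}}]}}}]},
#       "edge_felix_video_timeline": {"edges": []}}}}

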
# runs a basic GET request emulating Tor Browser
def instagram_get(url, CACHE_SECS=600, add_headers={}):
    headers = get_random_headers()
    default_headers = {
        'Accept': '*/*',
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept-Encoding': 'gzip, deflate, br',
        'Connection': 'keep-alive',
        'Cache-Control': 'no-cache',
        'Pragma': 'no-cache',
        'Host': 'www.instagram.com',
        'Referer': 'https://www.instagram.com/',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0'
    }
    for key in default_headers.keys():
        if not key in headers:
            headers[key] = default_headers[key]
    for key in add_headers.keys():
        headers[key] = add_headers[key]
    if not '.instagram.com/' in url:
        url = 'https://www.instagram.com{}'.format(url)

    # responses are cached on disk: the first line of the cache file is the
    # expiry timestamp, the rest is the raw response body
    cachef = './cache/'+md5sum(url)
    now = int(time.time())
    if os.path.exists(cachef):
        cache = readf(cachef).splitlines()
        ctime = int(cache[0])
        if now < ctime:
            return '\n'.join(cache[1:])

    r = requests.get(url, headers=headers)
    resp = r.text
    w = open(cachef, 'w')
    w.write(str(now+CACHE_SECS) + '\n')
    w.write(resp)
    w.close()
    return resp


def get_random_headers():
    # pick a random header file from ./headers and parse it into a dict
    a = os.listdir('./headers')
    rin = 0
    if len(a)-1 > 0:
        rin = random.randint(0, len(a)-1)
    lines = readf('./headers/{}'.format(a[rin])).splitlines()
    headers = {}
    for line in lines:
        reg = re.search('(^[^:]+):(.*)', line)
        headers[reg.group(1).strip()] = reg.group(2).strip()
    return headers


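# A sketch of the expected ./headers/* file format, inferred from the parser
# above: one "Name: value" header per line (the headers shown are illustrative):
#
#   User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0
#   Accept-Language: en-US,en;q=0.5

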
def account_exists(acc_id):
    return os.path.exists('./db/accounts/{}'.format(acc_id))


def account_config(acc_id, key, value):
    if not account_exists(acc_id):
        return False, 'Account does not exist: {}'.format(acc_id)

    accdata = db_get('accounts', acc_id)
    key = key.strip()
    value = value.strip()

    # an empty value means "read this key"; make sure passwords or cookies cannot be retrieved
    if value == '' and not key in ['password', 'cookie']:
        if not key in accdata:
            return False, 'Key does not exist yet: {}'.format(key)
        return True, accdata[key]

    # protect immutable keys
    if key in ['name', 'username', 'password', 'cookie']:
        return False, 'The given key is immutable: {}'.format(key)

    # the value has something in it, so we set it
    accdata[key] = value
    db_set('accounts', acc_id, accdata)
    return True, 'Account configuration saved: {}'.format(key)


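# Usage sketch for account_config (hypothetical account and values): passing an
# empty value reads a key, a non-empty value writes it:
#
#   account_config('someuser', 'sched', '3')  # -> (True, 'Account configuration saved: sched')
#   account_config('someuser', 'sched', '')   # -> (True, '3')

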
def db_set(table, acc_id, accdata):
    w = open('./db/{}/{}'.format(table, acc_id), 'w')
    w.write(json.dumps(accdata))
    w.close()


def db_get(table, acc_id):
    return json.loads(readf('./db/{}/{}'.format(table, acc_id)))


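# The flat-file "DB" layout used by db_set()/db_get() above, as inferred from the
# calls in this file (the account name is illustrative):
#
#   ./db/accounts/someuser   JSON object: name, username, password, and optionally
#                            cookie, sched, sched_now
#   ./db/posts/someuser      JSON list of already-mirrored IG shortcodes

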
def template(name):
    tplfile = './templates/{}.html'.format(name)
    if not os.path.exists(tplfile):
        return False
    return readf(tplfile)


def config():
    # lazily load and cache ./config.json
    global CONFIG
    if len(CONFIG.keys()) == 0:
        CONFIG = json.loads(readf('./config.json'))
    return CONFIG


def readf(f):
    r = open(f, 'r')
    c = r.read().strip()
    r.close()
    return c


if __name__ == '__main__':
    main()