Hi, is there some kind of limitation with sharing folders? I'm trying to share 5.4k folders and I'm getting
PollError('internal_error', None))
consistently. Sometimes the files share, sometimes they don't. I can manually share them no problem and usually I can share them as one offs in a python REPL using the dropbox sdk.
There are no error details.
My code is functionally similar to the snippet below:
import dropbox
#censored
# NOTE(review): a long-lived access token pasted inline — keep real tokens
# out of source; load from an environment variable or secrets store instead.
access_token = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Single SDK client reused for every API call below.
dbx = dropbox.Dropbox(access_token)
def get_entries(folder_result_list):
    """Return the entries of a ListFolderResult that are not already shared.

    An entry that is already a shared folder carries a truthy
    ``shared_folder_id``; those are skipped so we don't try to share the
    same folder twice.

    BUG FIX: the original test
    ``not hasattr(entry, 'shared_folder_id') and not entry.shared_folder_id``
    could never be True — when the attribute is missing the second operand
    raises AttributeError, and when it is present the first operand is
    False. ``getattr`` with a default expresses the intent safely.
    """
    entries = []
    for entry in folder_result_list.entries:
        # Missing attribute or a falsy (None) id both mean "not shared yet".
        if not getattr(entry, 'shared_folder_id', None):
            entries.append(entry)
    return entries
# List the parent folder and page through the full result set; each
# files_list_folder_continue call returns the next chunk via the cursor.
folder_result_list = dbx.files_list_folder(parent_folder, include_mounted_folders=False, limit=2000)
entries = get_entries(folder_result_list)
while folder_result_list.has_more:
    folder_result_list = dbx.files_list_folder_continue(folder_result_list.cursor)
    entries.extend(get_entries(folder_result_list))

# Filter for only FolderMetadata so we don't try to share DeletedMetadata
# entries or individual files; keying by id also de-duplicates.
entry_dict = {entry.id: entry for entry in entries if isinstance(entry, dropbox.files.FolderMetadata)}

# BUG FIX: entry_id_to_job_id was never initialized, so the first
# assignment below would raise NameError.
entry_id_to_job_id = {}
for entry_id, entry in entry_dict.items():
    try:
        # Forcing async so we get consistent results; the job status and
        # shared_folder_id are checked later on in the script.
        entry_id_to_job_id[entry_id] = dbx.sharing_share_folder(entry.path_lower, force_async=True).get_async_job_id()
    except dropbox.exceptions.ApiError:
        # TODO: log the failing entry_id and the error details here before
        # moving on — silently dropping them hides which folders failed.
        pass
The full error looks something like this:
ApiError('0ed9d04d8f01449b5dedea33b7445fee', PollError('internal_error', None))