Merge pull request #674 from seatable/add-app-import-template-api
add import-big-data-screen-io-task
shuntian authored Oct 12, 2024
2 parents 6137b5b + edd08c0 commit e76595a
Showing 4 changed files with 131 additions and 1 deletion.
22 changes: 21 additions & 1 deletion dtable_events/dtable_io/__init__.py
@@ -18,7 +18,7 @@
from dtable_events.app.config import DTABLE_WEB_SERVICE_URL
from dtable_events.dtable_io.big_data import import_excel_to_db, update_excel_to_db, export_big_data_to_excel, \
export_app_table_page_to_excel
from dtable_events.dtable_io.utils import setup_logger, \
from dtable_events.dtable_io.utils import post_big_data_screen_app_zip_file, setup_logger, \
prepare_asset_file_folder, post_dtable_json, post_asset_files, \
download_files_to_path, create_forms_from_src_dtable, copy_src_forms_to_json, \
prepare_dtable_json_from_memory, update_page_design_static_image, \
@@ -300,6 +300,26 @@ def import_big_data_screen(username, repo_id, dtable_uuid, page_id):
except Exception as e:
dtable_io_logger.error('rm extracted tmp file failed. ERROR: {}'.format(e))


def import_big_data_screen_app(username, repo_id, dtable_uuid, app_uuid, app_id, config):
"""
parse the zip in tmp folders and upload it
"""
tmp_extracted_path = os.path.join('/tmp/dtable-io', dtable_uuid, 'big_data_screen_zip_extracted/')
db_session = init_db_session_class(config)()
try:
post_big_data_screen_app_zip_file(username, repo_id, dtable_uuid, app_uuid, app_id, tmp_extracted_path, db_session)
except Exception as e:
dtable_io_logger.exception('import big data screen from dtable failed. ERROR: {}'.format(e))
else:
dtable_io_logger.info('import big data screen to dtable: %s success!', dtable_uuid)
finally:
db_session.close()
try:
shutil.rmtree(tmp_extracted_path)
except Exception as e:
dtable_io_logger.error('rm extracted tmp file failed. ERROR: {}'.format(e))

def parse_excel_csv(username, repo_id, file_name, file_type, parse_type, dtable_uuid, config):
"""
parse excel or csv to json file, then upload json file to file server
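Note (reviewer aside, not part of the diff): import_big_data_screen_app reads from /tmp/dtable-io/<dtable_uuid>/big_data_screen_zip_extracted/ and assumes the uploaded archive has already been unpacked there. A minimal staging sketch under that assumption; the helper name and zip handling below are illustrative only, and the expected members are inferred from post_big_data_screen_app_zip_file in utils.py further down.

# Hypothetical staging helper, not in this commit: unpack a screen-app export
# zip into the tmp path that import_big_data_screen_app reads from.
import os
import zipfile

def stage_big_data_screen_app_zip(zip_path, dtable_uuid):
    tmp_extracted_path = os.path.join(
        '/tmp/dtable-io', dtable_uuid, 'big_data_screen_zip_extracted/')
    os.makedirs(tmp_extracted_path, exist_ok=True)
    with zipfile.ZipFile(zip_path) as zf:
        # expected members: content.json, content.png, images/*
        zf.extractall(tmp_extracted_path)
    return tmp_extracted_path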
29 changes: 29 additions & 0 deletions dtable_events/dtable_io/request_handler.py
@@ -156,6 +156,35 @@ def add_big_data_screen_import_task():
return make_response(({'task_id': task_id}, 200))


@app.route('/add-big-data-screen-app-import-task', methods=['GET'])
def add_big_data_screen_app_import_task():
is_valid, error = check_auth_token(request)
if not is_valid:
return make_response((error, 403))

if task_manager.tasks_queue.full():
from dtable_events.dtable_io import dtable_io_logger
dtable_io_logger.warning('dtable io server busy, queue size: %d, current tasks: %s, threads is_alive: %s'
% (task_manager.tasks_queue.qsize(), task_manager.current_task_info,
task_manager.threads_is_alive()))
return make_response(('dtable io server busy.', 400))

username = request.args.get('username')
repo_id = request.args.get('repo_id')
dtable_uuid = request.args.get('dtable_uuid')
app_uuid = request.args.get('app_uuid')
app_id = request.args.get('app_id')

try:
task_id = task_manager.add_import_dtable_big_data_screen_app_task(
username, repo_id, dtable_uuid, app_uuid, app_id)
except Exception as e:
logger.error(e)
return make_response((e, 500))

return make_response(({'task_id': task_id}, 200))


@app.route('/add-parse-excel-csv-task', methods=['GET'])
def add_parse_excel_csv_task():
is_valid, error = check_auth_token(request)
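For reference, a minimal client call against the new endpoint. This is a sketch only: the host, port and the exact Authorization scheme accepted by check_auth_token are assumptions, and all parameter values are placeholders.

# Sketch of a client request, not part of this commit; adjust host, port and
# the auth header to your deployment (check_auth_token decides what is valid).
import requests

resp = requests.get(
    'http://127.0.0.1:6000/add-big-data-screen-app-import-task',  # assumed host/port
    headers={'Authorization': 'Token <jwt signed with the shared key>'},  # assumed scheme
    params={
        'username': 'alice@example.com',
        'repo_id': '<repo_id>',
        'dtable_uuid': '<dtable_uuid>',
        'app_uuid': '<app_uuid>',
        'app_id': '42',
    },
)
print(resp.status_code, resp.json())  # 200 -> {'task_id': '<uuid>'}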
9 changes: 9 additions & 0 deletions dtable_events/dtable_io/task_manager.py
@@ -94,6 +94,15 @@ def add_import_dtable_big_data_screen_task(self, username, repo_id, dtable_uuid,
self.tasks_map[task_id] = task
return task_id

def add_import_dtable_big_data_screen_app_task(self, username, repo_id, dtable_uuid, app_uuid, app_id):
from dtable_events.dtable_io import import_big_data_screen_app
task_id = str(uuid.uuid4())
task = (import_big_data_screen_app,
(username, repo_id, dtable_uuid, app_uuid, app_id, self.config))
self.tasks_queue.put(task_id)
self.tasks_map[task_id] = task
return task_id


def add_transfer_dtable_asset_files_task(self, username, repo_id, dtable_uuid, files, files_map, parent_dir, relative_path, replace, repo_api_token, seafile_server_url):
from dtable_events.dtable_io import get_dtable_transfer_asset_files
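The new method follows the manager's existing queue/map pattern: only the task id is queued, while the callable and its arguments are stored in tasks_map and looked up when a worker picks the id off the queue. A self-contained sketch of that pattern (simplified, assumed shape; not the actual worker loop in this repo):

# Simplified illustration of the queue/map pattern used by TaskManager
# (assumed shape for illustration, not the real worker code).
import queue
import uuid

tasks_queue = queue.Queue(maxsize=10)
tasks_map = {}

def add_task(func, args):
    task_id = str(uuid.uuid4())
    tasks_queue.put(task_id)            # only the id goes onto the queue
    tasks_map[task_id] = (func, args)   # the work itself is looked up by id
    return task_id

def run_one():
    task_id = tasks_queue.get()
    func, args = tasks_map[task_id]
    func(*args)                         # e.g. import_big_data_screen_app(...)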
72 changes: 72 additions & 0 deletions dtable_events/dtable_io/utils.py
@@ -1438,6 +1438,78 @@ def post_big_data_screen_zip_file(username, repo_id, dtable_uuid, page_id, tmp_e
seafile_api.post_file(repo_id, tmp_image_path, current_image_path, image_name, username)


def post_big_data_screen_app_zip_file(username, repo_id, dtable_uuid, app_uuid, app_id, tmp_extracted_path, db_session):

content_json_file_path = os.path.join(tmp_extracted_path, 'content.json')
content_poster_file_path = os.path.join(tmp_extracted_path, 'content.png')
new_content_poster_file_path = os.path.join(tmp_extracted_path, '%s.png' % app_id)
poster_file_name = os.path.basename(new_content_poster_file_path)
os.rename(content_poster_file_path, new_content_poster_file_path)
image_path = os.path.join(tmp_extracted_path, 'images/')
with open(content_json_file_path, 'r') as f:
content_json = f.read()
try:
content = json.loads(content_json)
except:
content = {}


base_dir = '/asset/' + dtable_uuid
# big_data_file_path = 'files/plugins/big-data-screen/%(page_id)s/' % ({
# 'page_id': page_id,
# })
big_data_file_path = 'external-apps/big-data-screen/%(app_id)s/' % {
'app_id': app_id
}
# image_file_path = 'files/plugins/big-data-screen/bg_images/'
image_file_path = '%(big_data_file_path)s/bg_images/' % {
'big_data_file_path': big_data_file_path.strip('/')
}
current_file_path = os.path.join(base_dir, big_data_file_path)
current_image_path = os.path.join(base_dir, image_file_path)

# 1. handle page_content
page_content_dict = content.get('page_content')
page_content_dict['app_id'] = app_id # update app_id
page_content_dict['app_uuid'] = app_uuid # update app_uuid
# tmp_page_json_path = os.path.join(tmp_extracted_path, '%s.json' % page_id)
# with open(tmp_page_json_path, 'wb') as f:
# f.write(json.dumps(page_content_dict).encode('utf-8'))
sql = "SELECT app_config FROM dtable_external_apps WHERE id=:app_id"
app_config = json.loads(db_session.execute(text(sql), {'app_id': app_id}).fetchone().app_config)
app_config['settings'] = page_content_dict
sql = "UPDATE dtable_external_apps SET app_config=:app_config WHERE id=:app_id"
db_session.execute(text(sql), {
'app_config': json.dumps(app_config).encode('utf-8'),
'app_id': app_id
})
db_session.commit()

path_id = seafile_api.get_dir_id_by_path(repo_id, current_file_path)
if not path_id:
seafile_api.mkdir_with_parents(repo_id, '/', current_file_path[1:], username)
# file_name = os.path.basename(tmp_page_json_path)
# dtable_file_id = seafile_api.get_file_id_by_path(
# repo_id, current_file_path + file_name)
# if dtable_file_id:
# seafile_api.del_file(repo_id, current_file_path, json.dumps([file_name]), '')
# seafile_api.post_file(repo_id, tmp_page_json_path, current_file_path, file_name, username)
seafile_api.post_file(repo_id, new_content_poster_file_path, current_file_path, poster_file_name, username)

# 2. handle images
image_path_id = seafile_api.get_dir_id_by_path(repo_id, current_image_path)
if not image_path_id:
seafile_api.mkdir_with_parents(repo_id, '/', current_image_path[1:], username)
for dirpath, _, filenames in os.walk(image_path):
for image_name in filenames:
tmp_image_path = os.path.join(dirpath, image_name)
dtable_file_id = seafile_api.get_file_id_by_path(
repo_id, current_image_path + image_name
)
if not dtable_file_id:
seafile_api.post_file(repo_id, tmp_image_path, current_image_path, image_name, username)


def escape_sheet_name(text):
# invalid_title_regex is from openpyxl
invalid_title_regex = re.compile(r'[\\*?:/\[\]]')
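A worked example of the asset paths produced by the logic above, with illustrative dtable_uuid and app_id values:

# Worked example of the target paths built in post_big_data_screen_app_zip_file
# (the uuid and app_id below are illustrative only).
import os

dtable_uuid = '0a1b2c3d4e5f47968a9b0c1d2e3f4a5b'
app_id = 42

base_dir = '/asset/' + dtable_uuid
big_data_file_path = 'external-apps/big-data-screen/%s/' % app_id
image_file_path = '%s/bg_images/' % big_data_file_path.strip('/')

print(os.path.join(base_dir, big_data_file_path))
# /asset/<dtable_uuid>/external-apps/big-data-screen/42/           <- poster 42.png
print(os.path.join(base_dir, image_file_path))
# /asset/<dtable_uuid>/external-apps/big-data-screen/42/bg_images/ <- background images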
