@sergei.terentev thanks for answering each of the questions.
I’ve tried with data
folder, and merge/join works on Swagger correctly.
The only remaining issue is that, when called through our Python code, it still returns
{
"error": "404 Client Error: Not Found for url: http://dev.groupdocs-merger/v1.0/merger/join"
}
However same API works from Swagger.
Here is the information you asked for:
- We are using NO library as of now — just a plain HTTP API call. Earlier, for the cloud version, we had groupdocs-merger-cloud==24.11 in requirements.txt.
Function to call groupdocs.merger API:
import requests
def join_documents(logger, payload):
    """POST *payload* to the GroupDocs merger ``join`` endpoint.

    Args:
        logger: Logger used for debug/error output.
        payload: JSON-serializable request body (the
            ``JoinItems`` / ``OutputPath`` structure expected by the
            merger service).

    Returns:
        The decoded JSON response from the merger service.

    Raises:
        requests.RequestException: on connection failures, timeouts, or
            non-2xx responses (via ``raise_for_status``).
    """
    url = "http://dev.groupdocs-merger/v1.0/merger/join"  # Todo: Pick from ENV
    headers = {
        "accept": "application/json",
        "Content-Type": "application/json",
    }
    try:
        # Explicit timeout: requests has NO default timeout, so a hung
        # merger service would otherwise block this worker forever.
        response = requests.post(url, json=payload, headers=headers, timeout=60)
        response.raise_for_status()
        res = response.json()
        logger.debug(f"Response from groupdocs.merger: {res}")
        return res
    except requests.RequestException as e:
        logger.error(f"Groupdocs merger: Error occurred: {e}")
        # Bare raise re-raises the active exception with its original
        # traceback intact ("raise e" would reset the traceback origin).
        raise
Code calling the above function:
@with_logger('pptx-merge-docker')
class PptxDockerMerge:
    """Falcon resource that merges PPTX decks via the GroupDocs merger
    service, using a shared EFS mount (``/data``) as the exchange area
    between this container and the merger container."""

    def on_post(self, req: falcon.Request, resp: falcon.Response) -> None:
        """Handle a merge request.

        Expected JSON body keys: ``tenant_id``, ``files`` (list of
        ``{"s3_path": ..., "pages": [...]}``), and optionally
        ``merged_file_name`` and ``upload_path``.

        Flow: download each source deck from S3, stage it on EFS where
        the merger service can read it, call the merger's ``join`` API,
        then upload the merged output back to S3.
        """
        self.logger.info("Received pptx merge request")
        try:
            body = req.media
            s3 = S3Utility()
            tenant_id = body.get("tenant_id", "")
            efs_input_dir = f"/data/{tenant_id}/input"
            efs_output_dir = f"/data/{tenant_id}/output"
            os.makedirs(efs_input_dir, exist_ok=True)
            os.makedirs(efs_output_dir, exist_ok=True)
            files = body.get("files", [])
            merged_file_name = body.get("merged_file_name", f"merged_{uuid4().hex}.pptx")
            # The merger service writes its output here (OutputPath below).
            merged_file_path = f"{efs_output_dir}/{merged_file_name}"
            upload_path = body.get("upload_path", None)
            self.logger.info(f"Merging {len(files)} slides.")
            temp_dir = tempfile.mkdtemp()
            downloaded_files = []
            input_join_req = {
                "JoinItems": [],
                "OutputPath": merged_file_path
            }
            for file in files:
                s3_path = file.get('s3_path')
                file_name = os.path.basename(s3_path)
                local_path = os.path.join(temp_dir, file_name)
                s3.download_file_from_s3(s3_path, local_path)
                # Stage on EFS so the merger container can see the file.
                efs_target_path = os.path.join(efs_input_dir, file_name)
                shutil.copyfile(local_path, efs_target_path)
                downloaded_files.append(local_path)
                input_join_req["JoinItems"].append({
                    "Pages": file.get('pages', []),
                    "FileInfo": {
                        "FilePath": efs_target_path,
                        "StorageName": ""
                    }
                })
                self.logger.info(f"Downloaded and moved {file_name} to {efs_target_path}")
            # Check files in EFS once
            pptx_files = glob.glob(f"/data/{tenant_id}/input/*.pptx")
            self.logger.debug(f"Files in /data/{tenant_id}/input: {pptx_files}")
            # Call groupdocs.merger API
            join_documents(logger=self.logger, payload=input_join_req)
            # BUG FIX: upload the merged OUTPUT (merged_file_path), not
            # efs_target_path — that was the last *input* file from the
            # loop above (and a NameError when `files` is empty).
            self.logger.debug(f"Merged docs at: {merged_file_path}. Uploading to S3")
            s3.upload_to_s3(merged_file_path, upload_path)
            self.logger.info(f"Uploaded merged pptx on {upload_path}")
            # Todo: Remove files from EFS
            resp.status = falcon.HTTP_200
            resp.media = {
                "message": "Merged PPTX uploaded successfully",
                "upload_path": upload_path
            }
        except Exception as e:
            # Boundary handler: log with traceback and surface a 500.
            self.logger.exception("Failed to merge PPTX slides")
            resp.status = falcon.HTTP_500
            resp.media = {"error": str(e)}