Add LOCAL_FILE_UPLOAD param

Son NK 2020-03-13 14:37:48 +01:00
parent cf257a92ec
commit a9fdfc799f
4 changed files with 56 additions and 23 deletions

.gitignore

@@ -7,4 +7,5 @@ db.sqlite
 .DS_Store
 config
 static/node_modules
 db.sqlite-journal
+static/upload

@@ -197,3 +197,14 @@ JOB_ONBOARDING_1 = "onboarding-1"
 
 # for pagination
 PAGE_LIMIT = 20
+
+# Upload to static/upload instead of s3
+LOCAL_FILE_UPLOAD = "LOCAL_FILE_UPLOAD" in os.environ
+UPLOAD_DIR = None
+
+if LOCAL_FILE_UPLOAD:
+    print("Upload files to local dir")
+    UPLOAD_DIR = os.path.join(ROOT_DIR, "static/upload")
+    if not os.path.exists(UPLOAD_DIR):
+        print("Create upload dir")
+        os.makedirs(UPLOAD_DIR)
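
Note that the flag is presence-based: "LOCAL_FILE_UPLOAD" in os.environ is true whenever the variable is set, whatever its value. A quick sketch of that semantic:

import os

# The check tests presence, not truthiness:
os.environ["LOCAL_FILE_UPLOAD"] = "false"
print("LOCAL_FILE_UPLOAD" in os.environ)  # True: even "false" enables local upload

del os.environ["LOCAL_FILE_UPLOAD"]
print("LOCAL_FILE_UPLOAD" in os.environ)  # False: only unsetting it disables the mode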

@@ -1,38 +1,55 @@
 from io import BytesIO
+import os
 
 import boto3
 import requests
 
-from app.config import AWS_REGION, BUCKET, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY
+from app.config import (
+    AWS_REGION,
+    BUCKET,
+    AWS_ACCESS_KEY_ID,
+    AWS_SECRET_ACCESS_KEY,
+    LOCAL_FILE_UPLOAD,
+    UPLOAD_DIR,
+    URL,
+)
 
-_session = boto3.Session(
-    aws_access_key_id=AWS_ACCESS_KEY_ID,
-    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
-    region_name=AWS_REGION,
-)
+if not LOCAL_FILE_UPLOAD:
+    _session = boto3.Session(
+        aws_access_key_id=AWS_ACCESS_KEY_ID,
+        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+        region_name=AWS_REGION,
+    )
 
 
-def upload_from_bytesio(key: str, bs: BytesIO, content_type="string") -> None:
+def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
     bs.seek(0)
-    _session.resource("s3").Bucket(BUCKET).put_object(
-        Key=key, Body=bs, ContentType=content_type
-    )
+
+    if LOCAL_FILE_UPLOAD:
+        file_path = os.path.join(UPLOAD_DIR, key)
+        file_dir = os.path.dirname(file_path)
+        os.makedirs(file_dir, exist_ok=True)
+        with open(file_path, "wb") as f:
+            f.write(bs.read())
+    else:
+        _session.resource("s3").Bucket(BUCKET).put_object(
+            Key=key, Body=bs, ContentType=content_type
+        )
 
 
 def upload_from_url(url: str, upload_path):
     r = requests.get(url)
     upload_from_bytesio(upload_path, BytesIO(r.content))
 
 
 def delete_file(key: str) -> None:
     o = _session.resource("s3").Bucket(BUCKET).Object(key)
     o.delete()
 
 
 def get_url(key: str, expires_in=3600) -> str:
-    s3_client = _session.client("s3")
-    return s3_client.generate_presigned_url(
-        ExpiresIn=expires_in,
-        ClientMethod="get_object",
-        Params={"Bucket": BUCKET, "Key": key},
-    )
+    if LOCAL_FILE_UPLOAD:
+        return URL + "/static/upload/" + key
+    else:
+        s3_client = _session.client("s3")
+        return s3_client.generate_presigned_url(
+            ExpiresIn=expires_in,
+            ClientMethod="get_object",
+            Params={"Bucket": BUCKET, "Key": key},
+        )
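
A usage sketch of the two code paths (the key and module path below are assumptions; the diff does not show the file's path):

from io import BytesIO

from app.s3 import upload_from_bytesio, get_url  # assumed module path

# With LOCAL_FILE_UPLOAD set, this writes static/upload/test/hello.txt
# (creating intermediate directories); otherwise it calls put_object on BUCKET.
upload_from_bytesio("test/hello.txt", BytesIO(b"hello"), "text/plain")

# Local mode returns URL + "/static/upload/test/hello.txt";
# S3 mode returns a presigned URL valid for an hour by default.
print(get_url("test/hello.txt"))

One caveat visible in the diff: delete_file still references _session unconditionally, so calling it with LOCAL_FILE_UPLOAD set would raise a NameError.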

@@ -113,3 +113,7 @@ FACEBOOK_CLIENT_SECRET=to_fill
 
 # Where to store GPG Keyring
 # GNUPGHOME=/tmp/gnupg
+
+# By default, files are uploaded to s3
+# Set this variable to use the local "static/upload/" directory instead
+# LOCAL_FILE_UPLOAD=true
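
Because only the variable's presence matters, local mode can also be enabled programmatically, as long as that happens before the config module is imported (the upload-dir creation above runs at import time). A minimal sketch, assuming the config lives at app.config:

import os

# Any value works; the config only checks that the key exists.
os.environ["LOCAL_FILE_UPLOAD"] = "true"

# Import after setting the variable: LOCAL_FILE_UPLOAD and UPLOAD_DIR are
# computed at import time, and the upload dir is created then as well.
from app import config  # assumed module path

print(config.LOCAL_FILE_UPLOAD)  # True
print(config.UPLOAD_DIR)         # <ROOT_DIR>/static/upload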