Merge pull request #108 from simple-login/local-upload

Add LOCAL_FILE_UPLOAD param
This commit is contained in:
Son Nguyen Kim 2020-03-13 18:36:42 +01:00 committed by Son NK
parent 0c2b16185f
commit 9eae97ccba
3 changed files with 54 additions and 22 deletions

View File

@@ -197,3 +197,14 @@ JOB_ONBOARDING_1 = "onboarding-1"
# for pagination # for pagination
PAGE_LIMIT = 20 PAGE_LIMIT = 20
# Upload to static/upload instead of s3
LOCAL_FILE_UPLOAD = "LOCAL_FILE_UPLOAD" in os.environ
UPLOAD_DIR = None
if LOCAL_FILE_UPLOAD:
print("Upload files to local dir")
UPLOAD_DIR = os.path.join(ROOT_DIR, "static/upload")
if not os.path.exists(UPLOAD_DIR):
print("Create upload dir")
os.makedirs(UPLOAD_DIR)

View File

@@ -1,38 +1,55 @@
from io import BytesIO from io import BytesIO
import os
import boto3 import boto3
import requests import requests
from app.config import AWS_REGION, BUCKET, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY from app.config import (
AWS_REGION,
_session = boto3.Session( BUCKET,
aws_access_key_id=AWS_ACCESS_KEY_ID, AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY, AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION, LOCAL_FILE_UPLOAD,
UPLOAD_DIR,
URL,
) )
if not LOCAL_FILE_UPLOAD:
def upload_from_bytesio(key: str, bs: BytesIO, content_type="string") -> None: _session = boto3.Session(
bs.seek(0) aws_access_key_id=AWS_ACCESS_KEY_ID,
_session.resource("s3").Bucket(BUCKET).put_object( aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
Key=key, Body=bs, ContentType=content_type region_name=AWS_REGION,
) )
def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
bs.seek(0)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, key)
file_dir = os.path.dirname(file_path)
os.makedirs(file_dir, exist_ok=True)
with open(file_path, "wb") as f:
f.write(bs.read())
else:
_session.resource("s3").Bucket(BUCKET).put_object(
Key=key, Body=bs, ContentType=content_type
)
def upload_from_url(url: str, upload_path): def upload_from_url(url: str, upload_path):
r = requests.get(url) r = requests.get(url)
upload_from_bytesio(upload_path, BytesIO(r.content)) upload_from_bytesio(upload_path, BytesIO(r.content))
def delete_file(key: str) -> None:
o = _session.resource("s3").Bucket(BUCKET).Object(key)
o.delete()
def get_url(key: str, expires_in=3600) -> str: def get_url(key: str, expires_in=3600) -> str:
s3_client = _session.client("s3") if LOCAL_FILE_UPLOAD:
return s3_client.generate_presigned_url( return URL + "/static/upload/" + key
ExpiresIn=expires_in, else:
ClientMethod="get_object", s3_client = _session.client("s3")
Params={"Bucket": BUCKET, "Key": key}, return s3_client.generate_presigned_url(
) ExpiresIn=expires_in,
ClientMethod="get_object",
Params={"Bucket": BUCKET, "Key": key},
)

View File

@@ -113,3 +113,7 @@ FACEBOOK_CLIENT_SECRET=to_fill
# Where to store GPG Keyring # Where to store GPG Keyring
# GNUPGHOME=/tmp/gnupg # GNUPGHOME=/tmp/gnupg
# By default, files are uploaded to s3
# Set this variable to use the local "static/upload/" directory instead
# LOCAL_FILE_UPLOAD=true