Merged in seth/jamtrack-lambda-importer (pull request #51)
Lambda & local importer for jamtracks * # This is a combination of 9 commits. # This is the 1st commit message: inital attempt before upgrading Rails in lambda # The commit message #2 will be skipped: # unzip works # The commit message #3 will be skipped: # wip # The commit message #4 will be skipped: # Add in gemspec # The commit message #5 will be skipped: # wip # The commit message #6 will be skipped: # imported one locally # The commit message #7 will be skipped: # Add in jmep # The commit message #8 will be skipped: # add in some baked in env vars # The commit message #9 will be skipped: # ran a local container build finally * inital attempt before upgrading Rails in lambda * Minor tweak for docker usage case of lambda running in AWS/lambda
This commit is contained in:
parent
55f6839521
commit
f26733fa46
|
|
@ -0,0 +1,5 @@
|
|||
# This file is auto generated by SAM CLI build command
|
||||
|
||||
[function_build_definitions]
|
||||
|
||||
[layer_build_definitions]
|
||||
|
|
@ -0,0 +1,179 @@
|
|||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Transform: AWS::Serverless-2016-10-31
|
||||
Parameters:
|
||||
Environment:
|
||||
Type: String
|
||||
AllowedValues:
|
||||
- dev
|
||||
- prod
|
||||
Description: The environment (e.g., dev or prod)
|
||||
TencyZipsBucket:
|
||||
Type: String
|
||||
Description: Where tency uploads their original zips
|
||||
TencyJamTracksBucket:
|
||||
Type: String
|
||||
Description: Where we unzip their zipped files
|
||||
EfsId:
|
||||
Type: String
|
||||
Description: The ID of the EFS to use for scratch
|
||||
VpcId:
|
||||
Type: String
|
||||
Description: The ID of the VPC where the Lambda function and EFS are deployed.
|
||||
SubnetIds:
|
||||
Type: CommaDelimitedList
|
||||
Description: The IDs of the subnets where the Lambda function will be deployed.
|
||||
SgIds:
|
||||
Type: CommaDelimitedList
|
||||
Description: The Id Of the security group
|
||||
MountPath:
|
||||
Type: String
|
||||
Description: The path to mount the EFS volume into the lamdda
|
||||
JamTrackContainerPath:
|
||||
Type: String
|
||||
Description: The local or container registry path to the jamtrack container
|
||||
DbHost:
|
||||
Type: String
|
||||
Description: potsgresql host
|
||||
DbUser:
|
||||
Type: String
|
||||
Description: postgresql user
|
||||
DbPass:
|
||||
Type: String
|
||||
Description: postgresql pass
|
||||
DbName:
|
||||
Type: String
|
||||
Description: db name
|
||||
AwsBucket:
|
||||
Type: String
|
||||
Description: aws bucket
|
||||
AwsBucketPublic:
|
||||
Type: String
|
||||
Description: aws bucket public
|
||||
Globals:
|
||||
Function:
|
||||
CodeUri: ./
|
||||
Architectures:
|
||||
- x86_64
|
||||
Resources:
|
||||
TencyUnzipFunction:
|
||||
Type: AWS::Serverless::Function
|
||||
Properties:
|
||||
MemorySize: 500
|
||||
Timeout: 900
|
||||
PackageType: Image
|
||||
ImageUri:
|
||||
Ref: JamTrackContainerPath
|
||||
FileSystemConfigs:
|
||||
- Arn:
|
||||
Fn::GetAtt:
|
||||
- EFSMountTarget
|
||||
- Arn
|
||||
LocalMountPath: /mnt/efs
|
||||
VpcConfig:
|
||||
SubnetIds:
|
||||
Ref: SubnetIds
|
||||
SecurityGroupIds:
|
||||
Ref: SgIds
|
||||
Role:
|
||||
Fn::GetAtt:
|
||||
- ZipExtractorFunctionRole
|
||||
- Arn
|
||||
Environment:
|
||||
Variables:
|
||||
ENV:
|
||||
Ref: Environment
|
||||
DB_HOST:
|
||||
Ref: DbHost
|
||||
DB_USER:
|
||||
Ref: DbUser
|
||||
DB_PASS:
|
||||
Ref: DbPass
|
||||
DB_NAME:
|
||||
Ref: DbName
|
||||
TENCY_ZIPS_BUCKET:
|
||||
Ref: TencyZipsBucket
|
||||
TENCY_JAMTRACKS_BUCKET:
|
||||
Ref: TencyJamTracksBucket
|
||||
AWS_BUCKET:
|
||||
Ref: AwsBucket
|
||||
AWS_BUCKET_PUBLIC:
|
||||
Ref: AwsBucketPublic
|
||||
FFMPEG_PATH: /opt/bin/ffmpeg
|
||||
FFMPEG_PATH_MP3: /opt/bin/ffmpeg
|
||||
JMEP_DIR: /var/task/shared/jmep
|
||||
END_ON_FAIL: 1
|
||||
MOUNT_PATH:
|
||||
Ref: MountPath
|
||||
ZipExtractorFunctionRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: lambda.amazonaws.com
|
||||
Action: sts:AssumeRole
|
||||
ManagedPolicyArns:
|
||||
- Ref: EFSFullAccessPolicy
|
||||
- arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
|
||||
Policies:
|
||||
- PolicyName: S3AccessPolicy
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- s3:ListBucket
|
||||
- s3:GetObject
|
||||
Resource:
|
||||
- Fn::Sub: arn:aws:s3:::${TencyZipsBucket}
|
||||
- Fn::Sub: arn:aws:s3:::${TencyZipsBucket}/*
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- s3:ListBucket
|
||||
- s3:GetObject
|
||||
- s3:PutObject
|
||||
Resource:
|
||||
- Fn::Sub: arn:aws:s3:::${TencyJamTracksBucket}
|
||||
- Fn::Sub: arn:aws:s3:::${TencyJamTracksBucket}/*
|
||||
- PolicyName: VPCNetworkingPolicy
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- ec2:CreateNetworkInterface
|
||||
- ec2:DescribeNetworkInterfaces
|
||||
- ec2:DeleteNetworkInterface
|
||||
Resource: '*'
|
||||
EFSFullAccessPolicy:
|
||||
Type: AWS::IAM::ManagedPolicy
|
||||
Properties:
|
||||
Description: EFS full access for Lambda
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- elasticfilesystem:ClientMount
|
||||
- elasticfilesystem:ClientWrite
|
||||
Resource:
|
||||
Fn::GetAtt:
|
||||
- EFSMountTarget
|
||||
- Arn
|
||||
EFSMountTarget:
|
||||
Type: AWS::EFS::AccessPoint
|
||||
Properties:
|
||||
FileSystemId:
|
||||
Ref: EfsId
|
||||
PosixUser:
|
||||
Uid: '1000'
|
||||
Gid: '1000'
|
||||
RootDirectory:
|
||||
CreationInfo:
|
||||
OwnerUid: '1000'
|
||||
OwnerGid: '1000'
|
||||
Permissions: '0777'
|
||||
Path:
|
||||
Ref: MountPath
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
BUNDLE_PATH: "vendor/bundle"
|
||||
BUNDLE_BUILD__PG: "--with-cppflags=-I/usr/include/openssl --with-ldflags=-L/usr/lib64"
|
||||
BUNDLE_WITHOUT: "development:test"
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
vendor
|
||||
tmp
|
||||
mapped
|
||||
assets
|
||||
aws-credentials
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
FROM index.docker.io/library/ruby:3.3
|
||||
|
||||
# Install the runtime interface client for Ruby
|
||||
#RUN gem install aws_lambda_ric
|
||||
|
||||
# Add the runtime interface client to the PATH
|
||||
ENV PATH="/usr/local/bundle/bin:/opt/bin:${PATH}"
|
||||
|
||||
RUN cat /etc/ssl/openssl.cnf
|
||||
|
||||
# https://stackoverflow.com/questions/77812112/what-to-do-if-cipherstring-defaultseclevel-1-in-openssl-3-configuration-file
|
||||
# to interop with our old Ubuntu 12 machines..
|
||||
RUN sed -i '/\[openssl_init\]/a ssl_conf = ssl_configuration' /etc/ssl/openssl.cnf
|
||||
RUN echo "\n[ssl_configuration]" >> /etc/ssl/openssl.cnf \
|
||||
&& echo "system_default = tls_system_default" >> /etc/ssl/openssl.cnf
|
||||
RUN echo "\n[tls_system_default]" >> /etc/ssl/openssl.cnf \
|
||||
&& echo "MinProtocol = TLSv1" >> /etc/ssl/openssl.cnf \
|
||||
&& echo "CipherString = DEFAULT@SECLEVEL=0" >> /etc/ssl/openssl.cnf
|
||||
|
||||
# Create a directory for the Lambda function
|
||||
ENV LAMBDA_TASK_ROOT=/var/task
|
||||
RUN mkdir -p ${LAMBDA_TASK_ROOT}
|
||||
WORKDIR ${LAMBDA_TASK_ROOT}
|
||||
|
||||
|
||||
ENV ARTIFACT_DIR=/opt
|
||||
ENV FUNCTION_DIR=${LAMBDA_TASK_ROOT}
|
||||
|
||||
# Install dependencies
|
||||
RUN apt update -y && apt install -y postgresql-common unzip vorbis-tools pip sox python-is-python3
|
||||
# Copy custom libraries
|
||||
COPY ./assets/bin/ffmpeg ${ARTIFACT_DIR}/bin/
|
||||
RUN chmod a+x /opt/bin/ffmpeg
|
||||
|
||||
RUN git clone http://www.pogo.org.uk/~mark/bpm-tools.git && cd bpm-tools && make && cp bpm /opt/bin/
|
||||
|
||||
# Copy just enough of the shared gem to satisfy bundle install when it runs.
|
||||
# This way we can make code changes easily in `shared/lib/*`, and not create
|
||||
# full docker rebuilds for speed
|
||||
RUN mkdir -p ${LAMBDA_TASK_ROOT}/shared
|
||||
COPY shared/shared.gemspec ${LAMBDA_TASK_ROOT}/shared/
|
||||
|
||||
COPY lambdas ${LAMBDA_TASK_ROOT}/lambdas
|
||||
|
||||
RUN ls -laR ${LAMBDA_TASK_ROOT}
|
||||
|
||||
WORKDIR ${LAMBDA_TASK_ROOT}/lambdas/unzipper
|
||||
# Copy Gemfile and Gemfile.lock
|
||||
RUN ls -la
|
||||
RUN gem install bundler && \
|
||||
bundle config set --local path "/vendor/bundle" && \
|
||||
bundle install
|
||||
|
||||
# Copy application code
|
||||
COPY shared ${LAMBDA_TASK_ROOT}/shared
|
||||
#RUN bundle config set --local deployment 'true' && bundle install
|
||||
|
||||
WORKDIR ${LAMBDA_TASK_ROOT}/lambdas/unzipper
|
||||
|
||||
# Set runtime interface client as default command for the container runtime
|
||||
#ENTRYPOINT [ "aws_lambda_ric" ]
|
||||
|
||||
ENTRYPOINT [ "bundle", "exec", "ruby", "app.rb" ]
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
FROM ruby:3.3
|
||||
|
||||
# Install the runtime interface client for Ruby
|
||||
RUN gem install aws_lambda_ric
|
||||
|
||||
# Add the runtime interface client to the PATH
|
||||
ENV PATH="/usr/local/bundle/bin:/opt/bin:${PATH}"
|
||||
|
||||
# Create a directory for the Lambda function
|
||||
ENV LAMBDA_TASK_ROOT=/var/task
|
||||
RUN mkdir -p ${LAMBDA_TASK_ROOT}
|
||||
WORKDIR ${LAMBDA_TASK_ROOT}
|
||||
|
||||
ENV ARTIFACT_DIR=/opt
|
||||
ENV FUNCTION_DIR=${LAMBDA_TASK_ROOT}
|
||||
|
||||
# Install dependencies
|
||||
RUN apt update -y && apt install -y postgresql-common unzip vorbis-tools pip sox python-is-python3
|
||||
# Copy custom libraries
|
||||
COPY ./assets/bin/ffmpeg ${ARTIFACT_DIR}/bin/
|
||||
RUN chmod a+x /opt/bin/ffmpeg
|
||||
|
||||
RUN git clone http://www.pogo.org.uk/~mark/bpm-tools.git && cd bpm-tools && make && cp bpm /opt/bin/
|
||||
|
||||
# Copy just enough of the shared gem to satisfy bundle install when it runs.
|
||||
# This way we can make code changes easily in `shared/lib/*`, and not create
|
||||
# full docker rebuilds for speed
|
||||
RUN mkdir -p ${LAMBDA_TASK_ROOT}/shared
|
||||
COPY shared/shared.gemspec ${LAMBDA_TASK_ROOT}/shared/
|
||||
|
||||
COPY lambdas ${LAMBDA_TASK_ROOT}/lambdas
|
||||
|
||||
RUN ls -laR ${LAMBDA_TASK_ROOT}
|
||||
|
||||
WORKDIR ${LAMBDA_TASK_ROOT}/lambdas/unzipper
|
||||
# Copy Gemfile and Gemfile.lock
|
||||
RUN ls -la
|
||||
RUN gem install bundler && \
|
||||
bundle config set --local path "/vendor/bundle" && \
|
||||
bundle install
|
||||
|
||||
# Copy application code
|
||||
COPY shared ${LAMBDA_TASK_ROOT}/shared
|
||||
#RUN bundle config set --local deployment 'true' && bundle install
|
||||
|
||||
WORKDIR ${LAMBDA_TASK_ROOT}/lambdas/unzipper
|
||||
|
||||
# Set runtime interface client as default command for the container runtime
|
||||
ENTRYPOINT [ "aws_lambda_ric" ]
|
||||
|
||||
# Set the Lambda handler
|
||||
CMD ["app.lambda_handler"]
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
source "https://rubygems.org"
|
||||
#gem "protected_attributes"
|
||||
gem "activerecord"# "= 4.2.8" # or your Rails version
|
||||
gem "aws-sdk-lambda"
|
||||
gem "json" #, "1.8.6"
|
||||
gem "pg"
|
||||
gem "ox"
|
||||
gem "logging"
|
||||
gem 'aws-sdk-s3', '~> 1' # , '~> 1'
|
||||
|
|
@ -0,0 +1,82 @@
|
|||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
activemodel (8.0.1)
|
||||
activesupport (= 8.0.1)
|
||||
activerecord (8.0.1)
|
||||
activemodel (= 8.0.1)
|
||||
activesupport (= 8.0.1)
|
||||
timeout (>= 0.4.0)
|
||||
activesupport (8.0.1)
|
||||
base64
|
||||
benchmark (>= 0.3)
|
||||
bigdecimal
|
||||
concurrent-ruby (~> 1.0, >= 1.3.1)
|
||||
connection_pool (>= 2.2.5)
|
||||
drb
|
||||
i18n (>= 1.6, < 2)
|
||||
logger (>= 1.4.2)
|
||||
minitest (>= 5.1)
|
||||
securerandom (>= 0.3)
|
||||
tzinfo (~> 2.0, >= 2.0.5)
|
||||
uri (>= 0.13.1)
|
||||
aws-eventstream (1.3.0)
|
||||
aws-partitions (1.1029.0)
|
||||
aws-sdk-core (3.214.1)
|
||||
aws-eventstream (~> 1, >= 1.3.0)
|
||||
aws-partitions (~> 1, >= 1.992.0)
|
||||
aws-sigv4 (~> 1.9)
|
||||
jmespath (~> 1, >= 1.6.1)
|
||||
aws-sdk-kms (1.96.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-lambda (1.144.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-s3 (1.176.1)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sdk-kms (~> 1)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sigv4 (1.10.1)
|
||||
aws-eventstream (~> 1, >= 1.0.2)
|
||||
base64 (0.2.0)
|
||||
benchmark (0.4.0)
|
||||
bigdecimal (3.1.9)
|
||||
concurrent-ruby (1.3.4)
|
||||
connection_pool (2.4.1)
|
||||
drb (2.2.1)
|
||||
i18n (1.14.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jmespath (1.6.2)
|
||||
json (2.9.1)
|
||||
little-plugger (1.1.4)
|
||||
logger (1.6.4)
|
||||
logging (2.4.0)
|
||||
little-plugger (~> 1.1)
|
||||
multi_json (~> 1.14)
|
||||
minitest (5.25.4)
|
||||
multi_json (1.15.0)
|
||||
ox (2.14.19)
|
||||
bigdecimal (>= 3.0)
|
||||
pg (1.5.9)
|
||||
securerandom (0.4.1)
|
||||
timeout (0.4.3)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
uri (1.0.2)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
x86_64-darwin-20
|
||||
|
||||
DEPENDENCIES
|
||||
activerecord
|
||||
aws-sdk-lambda
|
||||
aws-sdk-s3 (~> 1)
|
||||
json
|
||||
logging
|
||||
ox
|
||||
pg
|
||||
|
||||
BUNDLED WITH
|
||||
2.5.23
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
Using this to create a static build of ffmpeg with lib-fdkaac available
|
||||
https://github.com/zimbatm/ffmpeg-static
|
||||
|
||||
# manually pushed a zip by cd'ing into assets, and pushing bin/ffmpeg, bin/oggenc, and bin/sox into s3 to be used as sam layer
|
||||
s3 cp assets.zip s3://jamkazam-repo/lambda-assets/jamtrack-importer-assets.zip
|
||||
|
||||
|
|
@ -0,0 +1 @@
|
|||
results
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
You need parallels and AWS_SECRET and AWS_KEY defined in your environment for this to succeed locally.
|
||||
|
||||
run_single_example.sh runs a single jamtrack locally. Assumes you've run scripts/build-container-image to create an image
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
# Used to kick off the big job (i.e., provide a manifest of jamtracks to import)
|
||||
|
||||
RESULT_DIR=$(date +%Y-%m-%d_%H-%M-%S)
|
||||
RESULT_DIR=results/$RESULT_DIR/
|
||||
parallel -j 4 --joblog joblog.txt --results $RESULT_DIR {} < $1
|
||||
|
|
@ -0,0 +1,86 @@
|
|||
import os
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def load_env_vars(json_file):
|
||||
"""
|
||||
Load environment variables from a JSON file and set them in the current process.
|
||||
"""
|
||||
try:
|
||||
with open(json_file, 'r') as f:
|
||||
env_vars = json.load(f)
|
||||
|
||||
if not isinstance(env_vars, dict):
|
||||
raise ValueError("JSON file must contain key-value pairs.")
|
||||
|
||||
# Set each key-value pair as an environment variable
|
||||
for key, value in env_vars.items():
|
||||
os.environ[key] = str(value)
|
||||
print(f"Set environment variable: {key}={value}")
|
||||
|
||||
except FileNotFoundError:
|
||||
print(f"Error: File not found - {json_file}")
|
||||
sys.exit(1)
|
||||
except json.JSONDecodeError:
|
||||
print(f"Error: Failed to parse JSON file - {json_file}")
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
print(f"Error: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def run_docker(image, env_vars, docker_args, jam_track_id):
|
||||
"""
|
||||
Run a Docker container with the specified image, environment variables, and additional arguments.
|
||||
"""
|
||||
try:
|
||||
# Build the Docker run command with environment variables
|
||||
docker_cmd = ["docker", "run", "--rm"]
|
||||
for key, value in env_vars.items():
|
||||
docker_cmd.extend(["-e", f"{key}={value}"])
|
||||
|
||||
docker_cmd.extend(["-e", f"JAM_TRACK_ID={jam_track_id}"])
|
||||
# Add the Docker image and additional arguments
|
||||
docker_cmd.append(image)
|
||||
docker_cmd.extend(docker_args)
|
||||
|
||||
# Execute the Docker run command
|
||||
print(f"Running Docker command: {' '.join(docker_cmd)}")
|
||||
process = subprocess.Popen(docker_cmd, stdout = sys.stdout, stderr = sys.stderr, text=True)
|
||||
#result = subprocess.run(docker_cmd, capture_output=False, text=True)
|
||||
process.wait()
|
||||
|
||||
# Print the output or handle errors
|
||||
if process.returncode == 0:
|
||||
print("Docker runner succeeded")
|
||||
else:
|
||||
print(f"Docker runner failed #{process.returncode}:")
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error running Docker: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) < 3:
|
||||
print("Usage: python run_batch_job.py <docker_image> <json_file> [additional_args...]")
|
||||
sys.exit(1)
|
||||
|
||||
docker_image = sys.argv[1]
|
||||
json_file = sys.argv[2]
|
||||
jam_track_id = sys.argv[3]
|
||||
docker_args = sys.argv[4:] # All remaining arguments are passed to the Docker command
|
||||
|
||||
# Load environment variables from JSON
|
||||
load_env_vars(json_file)
|
||||
|
||||
# Extract current environment variables (after setting from JSON)
|
||||
current_env = {key: value for key, value in os.environ.items() if key in json.load(open(json_file))}
|
||||
current_env["AWS_ACCESS_KEY_ID"] = os.environ["AWS_KEY"]
|
||||
current_env["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET"]
|
||||
|
||||
# Run the Docker container
|
||||
run_docker(docker_image, current_env, docker_args, jam_track_id)
|
||||
|
||||
|
|
@ -0,0 +1 @@
|
|||
python run_batch_job.py localhost/jamtrack-lambda:1.0.0 ../env/local.json create-jamtrack jamkazam-tency-202410-test "mapped/Ace of Base - The Sign - 10111/manifest.txt"
|
||||
|
|
@ -0,0 +1 @@
|
|||
python batch/run_batch_job.py localhost/jamtrack-local:1.0.0 env/dev.json create-jamtrack jamkazam-tency-202410 "mapped/will.i.am - It's My Birthday - 47217/manifest.txt"
|
||||
|
|
@ -0,0 +1 @@
|
|||
./run_batch_job.sh ../env/dev.json localhost/jamtrack-lambda:1.0.0 create-jamtrack jamkazam-tency-202410-test "mapped/Ace of Base - The Sign - 10111/manifest.txt"
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
"Environment": "dev",
|
||||
"TencyZipsBucket": "jamkazam-tency-uploads",
|
||||
"TencyJamTracksBucket": "jamkazam-tency-202410",
|
||||
"AwsBucket": "jamkazam-staging",
|
||||
"AwsBucketPublic": "jamkazam-staging-public",
|
||||
"EfsId": "fs-0c6e24466df585bff",
|
||||
"VpcId": "vpc-040f8fef0c9700b58",
|
||||
"SubnetIds": "subnet-0a51ec0dd502ffa4a,subnet-0998ecced5ad2ed89",
|
||||
"SgIds": "sg-02cc6ee5382e2c0ce",
|
||||
"MountPath": "/mnt/efs",
|
||||
"JamTrackContainerPath": "727401853962.dkr.ecr.us-east-1.amazonaws.com/jamkazam/jamtrack-lambda:1.0.0",
|
||||
"DbName": "jam",
|
||||
"DbHost": "int.jamkazam.com",
|
||||
"DbPass": "ct2Es6DsZDjuTyh9WHRFrn4mQfhh62P8",
|
||||
"DbUser": "lambda"
|
||||
}
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{
|
||||
"Environment": "dev",
|
||||
"TencyZipsBucket": "jamkazam-tency-uploads",
|
||||
"TENCY_ZIPS_BUCKET": "jamkazam-tency-uploads",
|
||||
"TencyJamTracksBucket": "jamkazam-tency-202410",
|
||||
"TENCY_JAMTRACKS_BUCKET": "jamkazam-tency-202410",
|
||||
"AwsBucket": "jamkazam-staging",
|
||||
"AWS_BUCKET": "jamkazam-staging",
|
||||
"AwsBucketPublic": "jamkazam-staging-public",
|
||||
"AWS_BUCKET_PUBLIC": "jamkazam-staging-public",
|
||||
"EfsId": "fs-0c6e24466df585bff",
|
||||
"VpcId": "vpc-0b77b2557b7128087",
|
||||
"SubnetIds": "subnet-085e9ab833dc7e32a",
|
||||
"SgIds": "sg-0b2279b3a0966593f",
|
||||
"MountPath": "/lambda",
|
||||
"JamTrackContainerPath": "727401853962.dkr.ecr.us-east-1.amazonaws.com/jamkazam/jamtrack-lambda:1.0.0",
|
||||
"DbName": "jam",
|
||||
"DB_NAME": "jam",
|
||||
"DbHost": "int.jamkazam.com",
|
||||
"DB_HOST": "int.jamkazam.com",
|
||||
"DbPass": "ct2Es6DsZDjuTyh9WHRFrn4mQfhh62P8",
|
||||
"DB_PASSWORD": "ct2Es6DsZDjuTyh9WHRFrn4mQfhh62P8",
|
||||
"DbUser": "lambda",
|
||||
"DB_USER": "lambda",
|
||||
"FFMPEG_PATH": "/opt/bin/ffmpeg",
|
||||
"FFMPEG_PATH_MP3": "/opt/bin/ffmpeg",
|
||||
"JMEP_DIR": "/var/task/shared/jmep",
|
||||
"END_ON_FAIL": "1",
|
||||
"AWS_REGION": "us-east-1"
|
||||
}
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
{
|
||||
"Environment": "dev",
|
||||
"TencyZipsBucket": "jamkazam-tency-uploads-test",
|
||||
"TENCY_ZIPS_BUCKET": "jamkazam-tency-uploads-test",
|
||||
"TencyJamTracksBucket": "jamkazam-tency-202410-test",
|
||||
"TENCY_JAMTRACKS_BUCKET": "jamkazam-tency-202410-test",
|
||||
"AwsBucket": "jamkazam-dev",
|
||||
"AWS_BUCKET": "jamkazam-dev",
|
||||
"AwsBucketPublic": "jamkazam-dev-public",
|
||||
"AWS_BUCKET_PUBLIC": "jamkazam-dev-public",
|
||||
"EfsId": "fs-0c6e24466df585bff",
|
||||
"VpcId": "vpc-040f8fef0c9700b58",
|
||||
"SubnetIds": "subnet-0998ecced5ad2ed89",
|
||||
"SgIds": "sg-02cc6ee5382e2c0ce,sg-0ff99640a2871ac4c",
|
||||
"MountPath": "/tmp",
|
||||
"JamTrackContainerPath": "jamtrack-lambda:1.0.0",
|
||||
"DbName": "jam",
|
||||
"DB_NAME": "jam",
|
||||
"DbHost": "host.docker.internal",
|
||||
"DB_HOST": "host.docker.internal",
|
||||
"DbPass": "jam",
|
||||
"DB_PASSWORD": "jam",
|
||||
"DbUser": "seth",
|
||||
"DB_USER": "seth",
|
||||
"FFMPEG_PATH": "/opt/bin/ffmpeg",
|
||||
"FFMPEG_PATH_MP3": "/opt/bin/ffmpeg",
|
||||
"JMEP_DIR": "/var/task/shared/jmep",
|
||||
"END_ON_FAIL": "1",
|
||||
"AWS_REGION": "us-east-1"
|
||||
}
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
{
|
||||
"Environment": "prod",
|
||||
"TencyZipsBucket": "jamkazam-tency-uploads",
|
||||
"TENCY_ZIPS_BUCKET": "jamkazam-tency-uploads",
|
||||
"TencyJamTracksBucket": "jamkazam-tency-202410",
|
||||
"TENCY_JAMTRACKS_BUCKET": "jamkazam-tency-202410",
|
||||
"AwsBucket": "jamkazam",
|
||||
"AWS_BUCKET": "jamkazam",
|
||||
"AwsBucketPublic": "jamkazam-public",
|
||||
"AWS_BUCKET_PUBLIC": "jamkazam-public",
|
||||
"EfsId": "fs-0c6e24466df585bff",
|
||||
"VpcId": "vpc-040f8fef0c9700b58",
|
||||
"SubnetIds": "subnet-0998ecced5ad2ed89",
|
||||
"SgIds": "sg-02cc6ee5382e2c0ce,sg-0ff99640a2871ac4c",
|
||||
"MountPath" : "/lambda",
|
||||
"JamTrackContainerPath": "jamtrack-lambda:1.0.0",
|
||||
"DbName": "jam",
|
||||
"DB_NAME": "jam",
|
||||
"DbHost": "db.jamkazam.com",
|
||||
"DB_HOST": "db.jamkazam.com",
|
||||
"DbPass": "E2uEYFvPjhKbP4N7qp6kgMp4VddWNF4S",
|
||||
"DB_PASSWORD": "E2uEYFvPjhKbP4N7qp6kgMp4VddWNF4S",
|
||||
"DbUser": "lambda",
|
||||
"DB_USER": "lambda",
|
||||
"FFMPEG_PATH": "/opt/bin/ffmpeg",
|
||||
"FFMPEG_PATH_MP3": "/opt/bin/ffmpeg",
|
||||
"JMEP_DIR": "/var/task/shared/jmep",
|
||||
"END_ON_FAIL": "1",
|
||||
"AWS_REGION": "us-east-1"
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
BUNDLE_PATH: "vendor/bundle"
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
source "https://rubygems.org"
|
||||
gem "shared", path: "../../shared"
|
||||
#gem "protected_attributes"
|
||||
gem "activerecord"# "= 4.2.8" # or your Rails version
|
||||
gem "aws-sdk-lambda"
|
||||
gem "json" #, "1.8.6"
|
||||
gem "pg"
|
||||
gem "ox"
|
||||
gem 'iso-639'
|
||||
gem "logging"
|
||||
gem 'aws-sdk-s3', '~> 1' # , '~> 1'
|
||||
gem 'aws_lambda_ric'
|
||||
|
|
@ -0,0 +1,96 @@
|
|||
PATH
|
||||
remote: ../../shared
|
||||
specs:
|
||||
shared (0.1.0)
|
||||
aws-sdk-s3 (~> 1.0)
|
||||
json (~> 2.0)
|
||||
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
activemodel (8.0.1)
|
||||
activesupport (= 8.0.1)
|
||||
activerecord (8.0.1)
|
||||
activemodel (= 8.0.1)
|
||||
activesupport (= 8.0.1)
|
||||
timeout (>= 0.4.0)
|
||||
activesupport (8.0.1)
|
||||
base64
|
||||
benchmark (>= 0.3)
|
||||
bigdecimal
|
||||
concurrent-ruby (~> 1.0, >= 1.3.1)
|
||||
connection_pool (>= 2.2.5)
|
||||
drb
|
||||
i18n (>= 1.6, < 2)
|
||||
logger (>= 1.4.2)
|
||||
minitest (>= 5.1)
|
||||
securerandom (>= 0.3)
|
||||
tzinfo (~> 2.0, >= 2.0.5)
|
||||
uri (>= 0.13.1)
|
||||
aws-eventstream (1.3.0)
|
||||
aws-partitions (1.1032.0)
|
||||
aws-sdk-core (3.214.1)
|
||||
aws-eventstream (~> 1, >= 1.3.0)
|
||||
aws-partitions (~> 1, >= 1.992.0)
|
||||
aws-sigv4 (~> 1.9)
|
||||
jmespath (~> 1, >= 1.6.1)
|
||||
aws-sdk-kms (1.96.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-lambda (1.144.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-s3 (1.177.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sdk-kms (~> 1)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sigv4 (1.10.1)
|
||||
aws-eventstream (~> 1, >= 1.0.2)
|
||||
aws_lambda_ric (2.0.0)
|
||||
base64 (0.2.0)
|
||||
benchmark (0.4.0)
|
||||
bigdecimal (3.1.9)
|
||||
concurrent-ruby (1.3.4)
|
||||
connection_pool (2.4.1)
|
||||
csv (3.3.2)
|
||||
drb (2.2.1)
|
||||
i18n (1.14.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
iso-639 (0.3.8)
|
||||
csv
|
||||
jmespath (1.6.2)
|
||||
json (2.9.1)
|
||||
little-plugger (1.1.4)
|
||||
logger (1.6.4)
|
||||
logging (2.4.0)
|
||||
little-plugger (~> 1.1)
|
||||
multi_json (~> 1.14)
|
||||
minitest (5.25.4)
|
||||
multi_json (1.15.0)
|
||||
ox (2.14.19)
|
||||
bigdecimal (>= 3.0)
|
||||
pg (1.5.9)
|
||||
securerandom (0.4.1)
|
||||
timeout (0.4.3)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
uri (1.0.2)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
x86_64-darwin-20
|
||||
|
||||
DEPENDENCIES
|
||||
activerecord
|
||||
aws-sdk-lambda
|
||||
aws-sdk-s3 (~> 1)
|
||||
aws_lambda_ric
|
||||
iso-639
|
||||
json
|
||||
logging
|
||||
ox
|
||||
pg
|
||||
shared!
|
||||
|
||||
BUNDLED WITH
|
||||
2.6.2
|
||||
|
|
@ -0,0 +1,203 @@
|
|||
require_relative "../../shared/lib/jam_ruby/lib/lambda_function"
|
||||
|
||||
require 'cgi'
|
||||
require 'fileutils'
|
||||
|
||||
module JamRuby
|
||||
end
|
||||
|
||||
# Establish database connection
|
||||
def establish_connection
|
||||
# Load database configuration
|
||||
db_config_path = File.join(Dir.pwd, 'config', 'database.yml')
|
||||
puts "db_config_path #{db_config_path}"
|
||||
db_config = YAML.load(ERB.new(File.read(db_config_path)).result)
|
||||
puts "db_config #{db_config}"
|
||||
environment = ENV['RAILS_ENV'] || 'production'
|
||||
|
||||
|
||||
# Establish connection
|
||||
ActiveRecord::Base.establish_connection(db_config[environment])
|
||||
end
|
||||
|
||||
def lambda_handler(event:, context:)
|
||||
|
||||
|
||||
|
||||
puts "event #{event}"
|
||||
invocation_id = event["invocationId"]
|
||||
invocation_schema_version = event["invocationSchemaVersion"]
|
||||
|
||||
if invocation_schema_version != "2.0"
|
||||
puts "InformationSchemaVersion must be 2.0; found #{invocation_schema_version}"
|
||||
exit 1
|
||||
end
|
||||
|
||||
results = []
|
||||
result_code = nil
|
||||
result_string = nil
|
||||
|
||||
user_arguments = event["job"]["userArguments"]
|
||||
mode = user_arguments["mode"] if user_arguments
|
||||
if mode.nil?
|
||||
puts "Mode not specified as argument: #{user_arguments}"
|
||||
exit 1
|
||||
end
|
||||
|
||||
task = event["tasks"][0]
|
||||
task_id = task["taskId"]
|
||||
obj_key = CGI.unescape(task["s3Key"])
|
||||
obj_version_id = task["s3VersionId"]
|
||||
bucket_name = task["s3Bucket"]
|
||||
|
||||
if run(mode, bucket_name, obj_key, task_id) == true
|
||||
result_code = "Succeeded"
|
||||
result_string = "Imported #{obj_key}"
|
||||
else
|
||||
result_code = "Failed"
|
||||
result_string = "Failed to import #{obj_key}"
|
||||
end
|
||||
|
||||
results.append(
|
||||
{
|
||||
"taskId": task_id,
|
||||
"resultCode": result_code,
|
||||
"resultString": result_string,
|
||||
}
|
||||
)
|
||||
|
||||
{
|
||||
"invocationSchemaVersion": invocation_schema_version,
|
||||
"treatMissingKeysAs": "PermanentFailure",
|
||||
"invocationId": invocation_id,
|
||||
"results": results,
|
||||
}
|
||||
end
|
||||
|
||||
def run(mode, bucket_name, obj_key, task_id)
|
||||
|
||||
puts "mode=#{mode}"
|
||||
ENV['LD_LIBRARY_PATH'] = "/opt/lib:" + ENV['LD_LIBRARY_PATH'].to_s
|
||||
|
||||
# Debugging: Print the library path to verify
|
||||
puts "LD_LIBRARY_PATH: #{ENV['LD_LIBRARY_PATH']}"
|
||||
|
||||
zip_key = obj_key
|
||||
manifest_name = File.basename(zip_key)
|
||||
|
||||
puts "Processing obj_key #{obj_key} manifest #{manifest_name}"
|
||||
|
||||
tency_zips_bucket = ENV["TENCY_ZIPS_BUCKET"] #jamka
|
||||
tency_uploads_bucket = ENV["TENCY_JAMTRACKS_BUCKET"] # jamkazam-tency-202410
|
||||
|
||||
tency_aws_region = ENV["AWS_REGION"] || "us-east-1"
|
||||
if tency_aws_region.nil?
|
||||
puts "tency_aws_region #{tency_aws_region}"
|
||||
end
|
||||
puts "Tency AWS Region '#{tency_aws_region}'"
|
||||
s3_host = ENV["S3_HOST"] || nil # "http://localhost:19090"
|
||||
|
||||
tmp = ENV["MOUNT_PATH"] || "/tmp"
|
||||
|
||||
if mode == "cleanup"
|
||||
FileUtils.rm_rf(tmp)
|
||||
return true
|
||||
end
|
||||
|
||||
working_dir = File.join(tmp, mode, manifest_name)
|
||||
puts "Working_dir #{working_dir}"
|
||||
if File.exist?(working_dir)
|
||||
FileUtils.remove_dir(working_dir, true)
|
||||
end
|
||||
FileUtils.mkdir_p(working_dir)
|
||||
|
||||
begin
|
||||
local_manifest_path = File.join(working_dir, "manifest.txt")
|
||||
|
||||
puts "local manifest path #{local_manifest_path}"
|
||||
# Initialize the S3 client for the manifest
|
||||
s3_client = Aws::S3::Client.new(
|
||||
region: tency_aws_region,
|
||||
force_path_style: false,
|
||||
endpoint: 'https://s3.us-east-1.amazonaws.com'
|
||||
)
|
||||
|
||||
puts s3_client.config.endpoint
|
||||
|
||||
importer = JamRuby::JamTrackLambdaImporter.new
|
||||
|
||||
#manifest = "/Users/seth/workspace/tency/scripts/manifests/ace-of-base_the-sign_10111/manifest.txt"
|
||||
#tmp = "/Users/seth/workspace/jam-cloud/lambda/jamtrack-importer/tmp"
|
||||
|
||||
if mode == "unzip"
|
||||
#Aws.config[:region] = tency_aws_region
|
||||
puts "HEYYYY zips bucket: #{bucket_name}, exploded-files bucket: #{tency_uploads_bucket}"
|
||||
tency_zips_manager = JamRuby::S3Manager.new(bucket_name, s3_host)
|
||||
tence_uploads_manager = JamRuby::S3Manager.new(tency_uploads_bucket, s3_host)
|
||||
|
||||
puts importer.tency_unzipper(tency_zips_manager, tence_uploads_manager, working_dir, manifest_name, local_manifest_path)
|
||||
elsif mode == "create-jamtrack"
|
||||
|
||||
tency_zips_manager = JamRuby::S3Manager.new(tency_zips_bucket, s3_host)
|
||||
tence_uploads_manager = JamRuby::S3Manager.new(tency_uploads_bucket, s3_host)
|
||||
|
||||
|
||||
begin
|
||||
# Download the manifest from S3
|
||||
puts "Downloading manifest from #{bucket_name} the #{zip_key}"
|
||||
File.open(local_manifest_path, 'wb') do |file|
|
||||
s3_client.get_object(bucket: bucket_name, key: zip_key) do |chunk|
|
||||
file.write(chunk)
|
||||
end
|
||||
end
|
||||
|
||||
puts "File downloaded successfully to: #{local_manifest_path}"
|
||||
rescue Aws::S3::Errors::ServiceError => e
|
||||
puts "Failed to download manifest: #{e.message}"
|
||||
result_code = "PermanentFailure"
|
||||
result_string = "Failed to download manifest: #{e.message}"
|
||||
end
|
||||
|
||||
if ActiveRecord::Base.connected?
|
||||
puts "Already connected to db"
|
||||
else
|
||||
establish_connection
|
||||
end
|
||||
|
||||
result = ActiveRecord::Base.connection.execute('SELECT NOW()')
|
||||
puts "Database query result: #{result.first}"
|
||||
|
||||
JamRuby::JamTrackLambdaImporter.storage_format = "Tency"
|
||||
JamRuby::JamTrackLambdaImporter::import(tence_uploads_manager, working_dir, manifest_name, local_manifest_path, obj_key)
|
||||
else
|
||||
puts "Unknown mode #{mode}"
|
||||
exit 1
|
||||
end
|
||||
rescue => e
|
||||
puts "Error: #{e.message}"
|
||||
puts e.backtrace.join("\n")
|
||||
raise
|
||||
ensure
|
||||
puts "ensure block of run"
|
||||
if mode == "unzip" # because this happens generally in EFS/AWS
|
||||
FileUtils.rm_rf(working_dir)
|
||||
end
|
||||
end
|
||||
|
||||
puts "success"
|
||||
true
|
||||
end
|
||||
|
||||
# take the 1st 3 args from the cli
|
||||
if __FILE__ == $0
|
||||
if ARGV.length < 3
|
||||
puts "Usage: #{$0} <mode> <bucket_name> <obj_key>"
|
||||
exit 1
|
||||
end
|
||||
|
||||
mode = ARGV[0]
|
||||
bucket_name = ARGV[1]
|
||||
obj_key = ARGV[2]
|
||||
|
||||
run(mode,bucket_name, obj_key, "1")
|
||||
end
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
production:
|
||||
adapter: postgresql
|
||||
encoding: unicode
|
||||
pool: 5
|
||||
host: <%= ENV['DB_HOST'] %>
|
||||
database: <%= ENV['DB_NAME'] %>
|
||||
username: <%= ENV['DB_USER'] %>
|
||||
password: <%= ENV['DB_PASSWORD'] %>
|
||||
port: 5432
|
||||
sslmode: require
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1 @@
|
|||
bundle exec ruby ../../main.rb
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>com.example</groupId>
|
||||
<artifactId>local-s3-server</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>io.github.robothy</groupId>
|
||||
<artifactId>local-s3-rest</artifactId>
|
||||
<version>1.19</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
<artifactId>aws-java-sdk</artifactId>
|
||||
<version>1.12.780</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.8.1</version>
|
||||
<configuration>
|
||||
<source>1.8</source>
|
||||
<target>1.8</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<configuration>
|
||||
<mainClass>Main</mainClass>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/bash
|
||||
mvn compile exec:java
|
||||
|
|
@ -0,0 +1,99 @@
|
|||
import com.robothy.s3.rest.LocalS3;
|
||||
import com.robothy.s3.rest.bootstrap.LocalS3Mode;
|
||||
import com.amazonaws.services.s3.*;
|
||||
import com.amazonaws.*;
|
||||
import com.amazonaws.client.builder.*;
|
||||
import com.amazonaws.services.s3.model.*;
|
||||
import java.io.*;
|
||||
|
||||
public class Main {
|
||||
public static void main(String[] args) {
|
||||
LocalS3 localS3 = LocalS3.builder()
|
||||
.port(19090)
|
||||
.mode(LocalS3Mode.PERSISTENCE)
|
||||
.dataPath("./local-s3")
|
||||
.build();
|
||||
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
|
||||
System.out.println("Shutting down Local S3 server...");
|
||||
localS3.shutdown();
|
||||
System.out.println("Local S3 server shut down gracefully.");
|
||||
}));
|
||||
|
||||
localS3.start();
|
||||
System.out.println("Local S3 server started on port 19090");
|
||||
|
||||
int port = 19090;
|
||||
AmazonS3 s3 = AmazonS3ClientBuilder.standard()
|
||||
.enablePathStyleAccess()
|
||||
.withClientConfiguration(new ClientConfiguration().withClientExecutionTimeout(0))
|
||||
.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
|
||||
"http://localhost:" + port, "local"
|
||||
)).build();
|
||||
|
||||
String bucketName = "jamkazam-tency-uploads";
|
||||
|
||||
try {
|
||||
// Check if the bucket exists
|
||||
if (s3.doesBucketExistV2(bucketName)) {
|
||||
System.out.println("Bucket already exists: " + bucketName);
|
||||
} else {
|
||||
// Create the bucket
|
||||
Bucket bucket = s3.createBucket(bucketName);
|
||||
System.out.println("Bucket created successfully: " + bucket.getName());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
System.err.println("Error occurred while creating or checking the bucket: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
String filePath = "ace-of-base_the-sign_10111.zip";
|
||||
// String filePath = "tester.file";
|
||||
String s3Key = filePath;
|
||||
|
||||
try {
|
||||
// Check if the object already exists
|
||||
boolean objectExists = doesObjectExist(s3, bucketName, s3Key);
|
||||
if (objectExists) {
|
||||
System.out.println("Object already exists in S3: " + s3Key);
|
||||
} else {
|
||||
// Upload the file
|
||||
File file = new File(filePath);
|
||||
if (!file.exists()) {
|
||||
System.out.println("File not found: " + filePath);
|
||||
return;
|
||||
}
|
||||
|
||||
PutObjectRequest request = new PutObjectRequest(bucketName, s3Key, file);
|
||||
System.out.println("Uploading file " + s3Key);
|
||||
s3.putObject(request);
|
||||
|
||||
System.out.println("File uploaded successfully to S3: " + s3Key);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
System.err.println("Error occurred while checking or uploading to S3: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
// Keep the main thread alive to avoid premature termination
|
||||
try {
|
||||
Thread.currentThread().join();
|
||||
} catch (InterruptedException e) {
|
||||
System.err.println("Main thread interrupted: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean doesObjectExist(AmazonS3 s3, String bucketName, String objectKey) {
|
||||
try {
|
||||
s3.getObjectMetadata(new GetObjectMetadataRequest(bucketName, objectKey));
|
||||
return true; // Object exists
|
||||
} catch (com.amazonaws.services.s3.model.AmazonS3Exception e) {
|
||||
if (e.getStatusCode() == 404) {
|
||||
return false; // Object does not exist
|
||||
} else {
|
||||
throw e; // Rethrow for other errors
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
|
||||
module JamRuby
|
||||
end
|
||||
|
||||
require_relative "./lambdas/unzipper/app.rb"
|
||||
|
||||
|
||||
# mock event from Lambda / AWS
|
||||
event = {}
|
||||
event["invocationId"] = "123"
|
||||
event["invocationSchemaVersion"] = "2.0"
|
||||
event["job"] = {}
|
||||
event["job"]["userArguments"] = {}
|
||||
event["job"]["userArguments"]["mode"] = "create-jamtrack"
|
||||
event["tasks"] = [{ "taskId" => "123", "s3Key" => "mapped/Ace of Base - The Sign - 10111/manifest.txt", "s3BucketName" => "jamkazam-tency-202410-test" }]
|
||||
#event["tasks"] = [{ "taskId" => "123", "s3Key" => "mapped/Ace of Base - The Sign - 10111/manifest.txt", "s3BucketName" => "jamkazam-tency-202410-test" }]
|
||||
|
||||
|
||||
|
||||
#s3_manager = JamRuby::S3Manager.new('jamkazam-tency-uploads', "a", "b", "http://localhost:19090")
|
||||
#manifest = "/Users/seth/workspace/tency/scripts/manifests/ace-of-base_the-sign_10111/manifest.txt"
|
||||
#tmp = "/Users/seth/workspace/jam-cloud/lambda/jamtrack-importer/tmp"
|
||||
#tence_uploads_manager, working_dir, manifest_name, manifest_path
|
||||
# {
|
||||
# "Environment": "dev",
|
||||
# "TencyZipsBucket": "jamkazam-tency-uploads-test",
|
||||
# "TencyJamTracksBucket": "jamkazam-tency-202410-test",
|
||||
# "EfsId": "fs-0c6e24466df585bff",
|
||||
# "VpcId": "vpc-040f8fef0c9700b58",
|
||||
# "SubnetIds": "subnet-0998ecced5ad2ed89",
|
||||
# "SgIds": "sg-02cc6ee5382e2c0ce,sg-0ff99640a2871ac4c",
|
||||
# "MountPath": "/tmp",
|
||||
# "JamTrackContainerPath": "jamtrack-lambda:1.0.0",
|
||||
# "DbName": "jam",
|
||||
# "DbHost": "example.com",
|
||||
# "DbPass": "seth",
|
||||
# "DbUser": "seth"
|
||||
# }
|
||||
|
||||
# Parse JSON into a Ruby hash
|
||||
config = File.open("../../env/local.json") do |file|
|
||||
JSON.load(file)
|
||||
end
|
||||
# Load each key-value pair into ENV
|
||||
config.each do |key, value|
|
||||
puts "KEY #{key} VALUE #{value}"
|
||||
ENV[key] = value
|
||||
end
|
||||
|
||||
ENV["AWS_BUCKET"] = "jamkazam-dev"
|
||||
ENV["AWS_BUCKET_PUBLIC"] = "jamkazam-dev-public"
|
||||
ENV["DB_HOST"] = nil
|
||||
ENV["DB_USER"] = "seth"
|
||||
ENV["DB_NAME"] = "jam"
|
||||
ENV["END_ON_FAIL"] = "1"
|
||||
ENV["TENCY_ZIPS_BUCKET"] = "jamkazam-tency-uploads-test"
|
||||
ENV["TENCY_JAMTRACKS_BUCKET"] = "jamkazam-tency-202410-test"
|
||||
ENV["FFMPEG_PATH"] = "/Users/seth/workspace/jkclient-osx-build/ffmpeg/ffmpeg"
|
||||
ENV["FFMPEG_PATH_MP3"] = "/Users/seth/bin/ffmpeg"
|
||||
ENV["JMEP_DIR"] = "/Users/seth/workspace/jmep"
|
||||
lambda_handler(event:event, context:nil)
|
||||
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
|
||||
module JamRuby
|
||||
end
|
||||
|
||||
require "./lambda_function"
|
||||
|
||||
# mock event from Lambda / AWS
|
||||
event = {}
|
||||
event["invocationId"] = "123"
|
||||
event["invocationSchemaVersion"] = "1.0"
|
||||
event["tasks"] = [{ "taskId" => "123", "s3Key" => "manifests/ace-of-base_the-sign_10111/manifest", "s3BucketArn" => "arn:aws:s3:::test-bucket" }]
|
||||
|
||||
importer = JamRuby::JamTrackLambdaImporter.new
|
||||
|
||||
ENV["TENCY_ZIPS_BUCKET"] = "jamkazam-tency-uploads-test"
|
||||
|
||||
s3_manager = JamRuby::S3Manager.new(ENV["TENCY_ZIPS_BUCKET"] , "a", "b", "http://localhost:19090")
|
||||
manifest = "/Users/seth/workspace/tency/scripts/manifests/ace-of-base_the-sign_10111/manifest.txt"
|
||||
tmp = "/Users/seth/workspace/jam-cloud/lambda/jamtrack-importer/tmp"
|
||||
puts importer.tency_unzipper(s3_manager, manifest,tmp)
|
||||
|
||||
|
|
@ -0,0 +1 @@
|
|||
jamkazam-tency-202410-test,mapped/Ace of Base - The Sign - 10111/manifest.txt
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,58 @@
|
|||
#!/usr/bin/env ruby
|
||||
|
||||
require 'csv'
|
||||
|
||||
def process_kfn(file)
|
||||
# Check if the file exists
|
||||
unless File.exist?(file)
|
||||
raise "File not found: #{file}"
|
||||
end
|
||||
|
||||
# Read the file contents
|
||||
file_contents = CSV.read(file, col_sep: "\t")
|
||||
|
||||
# Generate bpm and start_time
|
||||
bpm = generate_bpm(file_contents)
|
||||
start_time = generate_start_time(file_contents)
|
||||
|
||||
[bpm, start_time]
|
||||
end
|
||||
|
||||
def generate_bpm(file_contents)
|
||||
# Look at only the first 8 rows for BPM calculation
|
||||
rows = file_contents.first(8)
|
||||
|
||||
# Calculate the deltas (differences in the 2nd column values)
|
||||
deltas = rows.each_cons(2).map do |row1, row2|
|
||||
row2[1].to_f - row1[1].to_f
|
||||
end
|
||||
|
||||
# Average the deltas and convert to BPM
|
||||
average_delta = deltas.sum / deltas.size
|
||||
bpm = (1 / average_delta) * 60
|
||||
|
||||
bpm.round(2) # Return rounded BPM value
|
||||
end
|
||||
|
||||
def generate_start_time(file_contents)
|
||||
# Return the 2nd column value of the first row
|
||||
file_contents.first[1].to_f
|
||||
end
|
||||
|
||||
if __FILE__ == $0
|
||||
# Main script execution
|
||||
if ARGV.size != 1
|
||||
puts "Usage: #{$PROGRAM_NAME} <kfn_file>"
|
||||
exit 1
|
||||
end
|
||||
|
||||
kfn_file = ARGV[0]
|
||||
|
||||
begin
|
||||
bpm, start_time = process_kfn(kfn_file)
|
||||
puts "BPM: #{bpm}"
|
||||
puts "Start Time: #{start_time}"
|
||||
rescue => e
|
||||
puts "Error: #{e.message}"
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
Using this to create a static build of ffmpeg with lib-fdkaac available
|
||||
https://github.com/zimbatm/ffmpeg-static
|
||||
|
||||
# manually pushed a zip by cd'ing into assets, and pushing bin/ffmpeg, bin/oggenc, and bin/sox into s3 to be used as sam layer
|
||||
s3 cp assets.zip s3://jamkazam-repo/lambda-assets/jamtrack-importer-assets.zip
|
||||
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
version = 0.1
|
||||
[default.deploy.parameters]
|
||||
stack_name = "jamtrack-importer-staging"
|
||||
resolve_s3 = true
|
||||
s3_prefix = "jamtrack-importer-staging"
|
||||
region = "us-east-1"
|
||||
confirm_changeset = true
|
||||
capabilities = "CAPABILITY_IAM"
|
||||
parameter_overrides = "Environment=\"dev\" TencyZipsBucket=\"jamkazam-tency-uploads-test\" TencyJamTracksBucket=\"jamkazam-tency-202410-test\" EfsId=\"fs-0c6e24466df585bff\" VpcId=\"vpc-040f8fef0c9700b58\" SubnetIds=\"subnet-0998ecced5ad2ed89\" SgIds=\"sg-02cc6ee5382e2c0ce,sg-0ff99640a2871ac4c\" MountPath=\"/lambda\" JamTrackContainerPath=\"gcr.io/tough-craft-276813/jamtrack-lambda:1.0.0\" DbHost=\"int.jamkazam.com\" DbUser=\"lambda\" DbPass=\"ct2Es6DsZDjuTyh9WHRFrn4mQfhh62P8\" DbName=\"jam\" AwsBucket=\"jamkazam-staging\" AwsBucketPublic=\"jamkazam-staging-public\""
|
||||
image_repositories = ["TencyUnzipFunction=727401853962.dkr.ecr.us-east-1.amazonaws.com/jamtrackimporterstaging1c2500bd/tencyunzipfunction48755338repo"]
|
||||
|
|
@ -0,0 +1 @@
|
|||
scripts/build-container-image-sam && scripts/deploy-container-image 1.0.0
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
docker rmi $(docker images --filter=reference="*jamtrack-local:rapid-x86_64*" -q)
|
||||
docker build --platform linux/amd64 -t jamtrack-local:1.0.0 .
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
docker rmi $(docker images --filter=reference="*jamtrack-lambda:rapid-x86_64*" -q)
|
||||
docker build --platform linux/amd64 -t jamtrack-lambda:1.0.0 -f Dockerfile.sam .
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Input file
|
||||
FILE=$1
|
||||
|
||||
# Check if the file is provided
|
||||
if [ -z "$FILE" ]; then
|
||||
echo "Usage: $0 <file>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if the file exists
|
||||
if [ ! -f "$FILE" ]; then
|
||||
echo "Error: File '$FILE' not found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Find rows containing a comma and extract the first segment before '-'
|
||||
while IFS= read -r line; do
|
||||
if [[ $line == *,* ]]; then
|
||||
# Extract the first segment before '-'
|
||||
first_segment=$(echo "$line" | cut -d'-' -f1 | xargs)
|
||||
echo "Line with comma: '$line' | First segment: '$first_segment'"
|
||||
fi
|
||||
done < "$FILE"
|
||||
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
require 'csv'
|
||||
require 'optparse'
|
||||
|
||||
def process_csv(input_file)
|
||||
# Open the input file and process it line by line
|
||||
CSV.foreach(input_file) do |row|
|
||||
# Write only the first two columns to stdout
|
||||
puts row[0..1].to_csv
|
||||
end
|
||||
end
|
||||
|
||||
def main
|
||||
options = {}
|
||||
|
||||
# Define the command-line options
|
||||
OptionParser.new do |opts|
|
||||
opts.banner = "Usage: process_csv.rb -i INPUT_FILE"
|
||||
|
||||
opts.on("-i", "--input INPUT_FILE", "Path to the input CSV file") do |input|
|
||||
options[:input] = input
|
||||
end
|
||||
end.parse!
|
||||
|
||||
# Check if the input file is provided
|
||||
unless options[:input]
|
||||
puts "Error: Input file is required."
|
||||
puts "Usage: process_csv.rb -i INPUT_FILE"
|
||||
exit 1
|
||||
end
|
||||
|
||||
# Process the CSV file
|
||||
begin
|
||||
process_csv(options[:input])
|
||||
rescue Errno::ENOENT
|
||||
STDERR.puts "Error: File '#{options[:input]}' not found."
|
||||
exit 1
|
||||
end
|
||||
end
|
||||
|
||||
# Run the script
|
||||
main if __FILE__ == $PROGRAM_NAME
|
||||
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Input Ruby file
|
||||
INPUT_FILE="lambda_function.rb"
|
||||
# Source directory where the files are located
|
||||
SOURCE_DIR="../../ruby/lib"
|
||||
|
||||
# Destination directory to copy the files
|
||||
DEST_DIR="./lib"
|
||||
|
||||
|
||||
|
||||
# Ensure the destination directory exists
|
||||
mkdir -p "$DEST_DIR"
|
||||
|
||||
# Parse the Ruby file
|
||||
while IFS= read -r line; do
|
||||
# Check if the line starts with `require "jam_ruby`
|
||||
if [[ $line =~ ^require\ \"jam_ruby/(.*)\"$ ]]; then
|
||||
# Extract the relative path from the require statement
|
||||
RELATIVE_PATH=${BASH_REMATCH[1]}
|
||||
|
||||
# Build the source and destination paths
|
||||
SOURCE_FILE="$SOURCE_DIR/jam_ruby/$RELATIVE_PATH.rb"
|
||||
DEST_FILE="$DEST_DIR/jam_ruby/$RELATIVE_PATH.rb"
|
||||
|
||||
# Ensure the destination subdirectory exists
|
||||
DEST_SUBDIR=$(dirname "$DEST_FILE")
|
||||
mkdir -p "$DEST_SUBDIR"
|
||||
|
||||
# Copy the file
|
||||
if [ -f "$SOURCE_FILE" ]; then
|
||||
cp "$SOURCE_FILE" "$DEST_FILE"
|
||||
echo "Copied: $SOURCE_FILE -> $DEST_FILE"
|
||||
else
|
||||
echo "Warning: Source file not found: $SOURCE_FILE"
|
||||
fi
|
||||
fi
|
||||
done < "$INPUT_FILE"
|
||||
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
AWS_ACCOUNT_ID=727401853962
|
||||
#ROLE_NAME=jamtrack-processor-test
|
||||
ROLE_NAME=JamKazamJamTracksBatchRole
|
||||
REPORT_BUCKET=jamkazam-tency-202410-test
|
||||
SOURCE_BUCKET=jamkazam-tency-uploads-test
|
||||
LAMBDA_FUNCTION_ARN=arn:aws:lambda:us-east-1:727401853962:function:jamtrack-importer-dev-TencyUnzipFunction-EzAuWk2YP3Oj
|
||||
#https://us-east-1.console.aws.amazon.com/s3/object/jamkazam-tency-uploads-test?region=us-east-1&bucketType=general&prefix=manifest.csv
|
||||
MANIFEST_ETAG=f3c0008ddd2d6f292e7e44cd3e2cff1b
|
||||
TOKEN=$(uuidgen)
|
||||
|
||||
set -eu -o pipefail
|
||||
|
||||
JOB_ID=$(aws s3control create-job \
|
||||
--account-id $AWS_ACCOUNT_ID \
|
||||
--operation '{"LambdaInvoke": {"FunctionArn": "'$LAMBDA_FUNCTION_ARN'"}}' \
|
||||
--manifest '{"Spec": {"Format": "S3BatchOperations_CSV_20180820", "Fields": ["Bucket", "Key"]}, "Location": {"ObjectArn": "arn:aws:s3:::'$SOURCE_BUCKET'/manifest.csv", "ETag": "'$MANIFEST_ETAG'"}}' \
|
||||
--report '{"Bucket": "arn:aws:s3:::'$REPORT_BUCKET'", "Prefix": "reports/", "Format": "Report_CSV_20180820", "Enabled": true, "ReportScope": "AllTasks"}' \
|
||||
--priority 42 \
|
||||
--role-arn arn:aws:iam::$AWS_ACCOUNT_ID:role/$ROLE_NAME \
|
||||
--description "Batch job to process zip files in S3 bucket" \
|
||||
--client-request-token "'$TOKEN'" \
|
||||
--region us-east-1 \
|
||||
--no-confirmation-required \
|
||||
--query "JobId" --output text)
|
||||
|
||||
echo $JOB_ID
|
||||
|
||||
echo "https://us-east-1.console.aws.amazon.com/s3/jobs/$JOB_ID?region=us-east-1"
|
||||
|
||||
echo "Activated S3 Batch Operations Job with ID: $JOB_ID"
|
||||
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
AWS_ACCOUNT_ID=727401853962
|
||||
#ROLE_NAME=jamtrack-processor-test
|
||||
ROLE_NAME=JamKazamJamTracksBatchRole
|
||||
REPORT_BUCKET=jamkazam-tency-202410
|
||||
SOURCE_BUCKET=jamkazam-tency-202410
|
||||
LAMBDA_FUNCTION_ARN=arn:aws:lambda:us-east-1:727401853962:function:jamtrack-importer-staging-TencyUnzipFunction-HSAQnacd11Sx
|
||||
#https://us-east-1.console.aws.amazon.com/s3/object/jamkazam-tency-uploads-test?region=us-east-1&bucketType=general&prefix=manifest.csv
|
||||
MANIFEST_ETAG=1f491116e97853094f14d22d7c110c49
|
||||
TOKEN=$(uuidgen)
|
||||
|
||||
set -eu -o pipefail
|
||||
|
||||
JOB_ID=$(aws s3control create-job \
|
||||
--account-id $AWS_ACCOUNT_ID \
|
||||
--operation '{"LambdaInvoke": {"FunctionArn": "'$LAMBDA_FUNCTION_ARN'", "InvocationSchemaVersion" : "2.0", "UserArguments" : {"mode" : "unzip"}}}' \
|
||||
--manifest '{"Spec": {"Format": "S3BatchOperations_CSV_20180820", "Fields": ["Bucket", "Key"]}, "Location": {"ObjectArn": "arn:aws:s3:::'$SOURCE_BUCKET'/manifests/unzip/single-test.csv", "ETag": "'$MANIFEST_ETAG'"}}' \
|
||||
--report '{"Bucket": "arn:aws:s3:::'$REPORT_BUCKET'", "Prefix": "reports/", "Format": "Report_CSV_20180820", "Enabled": true, "ReportScope": "AllTasks"}' \
|
||||
--priority 42 \
|
||||
--role-arn arn:aws:iam::$AWS_ACCOUNT_ID:role/$ROLE_NAME \
|
||||
--description "Batch job to process zip files in S3 bucket" \
|
||||
--client-request-token "'$TOKEN'" \
|
||||
--region us-east-1 \
|
||||
--no-confirmation-required \
|
||||
--query "JobId" --output text)
|
||||
|
||||
echo $JOB_ID
|
||||
|
||||
echo "https://us-east-1.console.aws.amazon.com/s3/jobs/$JOB_ID?region=us-east-1"
|
||||
|
||||
echo "Activated S3 Batch Operations Job with ID: $JOB_ID"
|
||||
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
AWS_ACCOUNT_ID=727401853962
|
||||
#ROLE_NAME=jamtrack-processor-test
|
||||
ROLE_NAME=JamKazamJamTracksBatchRole
|
||||
REPORT_BUCKET=jamkazam-tency-202410-test
|
||||
SOURCE_BUCKET=jamkazam-tency-uploads-test
|
||||
#LAMBDA_FUNCTION_ARN=arn:aws:lambda:us-east-1:727401853962:function:jamtrack-processing-ZipExtractorFunction-thM3NERnsIBg
|
||||
#https://us-east-1.console.aws.amazon.com/s3/object/jamkazam-tency-uploads-test?region=us-east-1&bucketType=general&prefix=manifest.csv
|
||||
MANIFEST_ETAG=da094abf7ed6cbd4ad41273c328d72b8
|
||||
TOKEN=$(uuidgen)
|
||||
|
||||
set -eu -o pipefail
|
||||
|
||||
JOB_ID=$(aws s3control create-job \
|
||||
--account-id $AWS_ACCOUNT_ID \
|
||||
--operation '{"LambdaInvoke": {"FunctionArn": "'$LAMBDA_FUNCTION_ARN'"}}' \
|
||||
--manifest '{"Spec": {"Format": "S3BatchOperations_CSV_20180820", "Fields": ["Bucket", "Key"]}, "Location": {"ObjectArn": "arn:aws:s3:::'$SOURCE_BUCKET'/manifest-full.csv", "ETag": "'$MANIFEST_ETAG'"}}' \
|
||||
--report '{"Bucket": "arn:aws:s3:::'$REPORT_BUCKET'", "Prefix": "reports/", "Format": "Report_CSV_20180820", "Enabled": true, "ReportScope": "AllTasks"}' \
|
||||
--priority 42 \
|
||||
--role-arn arn:aws:iam::$AWS_ACCOUNT_ID:role/$ROLE_NAME \
|
||||
--description "Batch job to process zip files in S3 bucket" \
|
||||
--client-request-token "'$TOKEN'" \
|
||||
--region us-east-1 \
|
||||
--no-confirmation-required \
|
||||
--query "JobId" --output text)
|
||||
|
||||
echo $JOB_ID
|
||||
|
||||
echo "https://us-east-1.console.aws.amazon.com/s3/jobs/$JOB_ID?region=us-east-1"
|
||||
|
||||
echo "Activated S3 Batch Operations Job with ID: $JOB_ID"
|
||||
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
|
||||
ACCOUNT_ID=727401853962
|
||||
REGION=us-east-1
|
||||
|
||||
VERSION="$1"
|
||||
|
||||
if [ -z "$VERSION" ]; then
|
||||
echo "Must specify version. Probably 1.0.0"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
docker tag "localhost/jamtrack-lambda:1.0.0" "$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/jamkazam/jamtrack-lambda:${VERSION}"
|
||||
|
||||
|
||||
aws ecr get-login-password --region $REGION | docker login --username AWS --password-stdin $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com
|
||||
docker push $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/jamkazam/jamtrack-lambda:${VERSION}
|
||||
aws lambda update-function-code --function-name jamtrack-importer-staging-TencyUnzipFunction-HSAQnacd11Sx --image-uri 727401853962.dkr.ecr.us-east-1.amazonaws.com/jamkazam/jamtrack-lambda:1.0.0
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
#!/bin/bash
|
||||
|
||||
sam build && sam deploy --template-file template.yaml --stack-name jamtrack-importer-staging \
|
||||
--parameter-overrides $(cat env/dev-sam.json | jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]') \
|
||||
--capabilities CAPABILITY_IAM --force-upload
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
#!/bin/bash
|
||||
|
||||
sam build && sam deploy --template-file template.yaml --stack-name jamtrack-importer-dev \
|
||||
--parameter-overrides $(cat env/dev.json | jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]') \
|
||||
--capabilities CAPABILITY_IAM
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
{
|
||||
"invocationSchemaVersion": "2.0",
|
||||
"invocationId": "invocation-id-example",
|
||||
"job": {
|
||||
"id": "job-id-example",
|
||||
"userArguments" : {
|
||||
"mode" : "create-jamtrack"
|
||||
}
|
||||
},
|
||||
"tasks": [
|
||||
{
|
||||
"taskId": "task-0id-example-1",
|
||||
"s3BucketName": "jamkazam-tency-202410-test",
|
||||
"s3Key": "mapped/Ace of Base - The Sign - 10111/manifest.txt",
|
||||
"s3VersionId": "example-version-id-1"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
{
|
||||
"invocationSchemaVersion": "2.0",
|
||||
"invocationId": "invocation-id-example",
|
||||
"job": {
|
||||
"id": "job-id-example",
|
||||
"userArguments" : {
|
||||
"mode" : "unzip"
|
||||
}
|
||||
},
|
||||
"tasks": [
|
||||
{
|
||||
"taskId": "task-0id-example-1",
|
||||
"s3BucketName": "jamkazam-tency-202410-test",
|
||||
"s3Key": "manifests/ace-of-base_the-sign_10111/manifest.txt",
|
||||
"s3VersionId": "example-version-id-1"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
#sam build --use-container --skip-pull-image && \
|
||||
sam build --use-container && \
|
||||
sam local invoke --event scripts/event-test.json --debug \
|
||||
--parameter-overrides $(cat env/local.json | jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]')
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
sam build --use-container --skip-pull-image && \
|
||||
sam local invoke --event scripts/event-test-import.json --debug --docker-network host --add-host example.com:192.168.4.235 \
|
||||
--parameter-overrides $(cat env/local.json | jq -r 'to_entries|map("\(.key)=\(.value|tostring)")|.[]')
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
docker rmi localhost/jamtrack-lambda:1.0.0
|
||||
docker rmi localhost/jamtrack-lambda:latest
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Usage: ./run_batch_job.sh docker_image command [additional_args...]
|
||||
|
||||
# Check for at least two arguments
|
||||
if [ "$#" -lt 2 ]; then
|
||||
echo "Usage: $0 docker_image command [additional_args...]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DOCKER_IMAGE=$1 # The Docker image to use
|
||||
COMMAND=$2 # The command to run inside the container
|
||||
shift 2 # Shift the arguments to access additional arguments
|
||||
|
||||
# Pass remaining arguments to the Docker container
|
||||
echo "Running Docker container with image: $DOCKER_IMAGE"
|
||||
echo "Command: $COMMAND"
|
||||
echo "Arguments: $@"
|
||||
|
||||
OUTPUT=$(docker run --rm "$DOCKER_IMAGE" "$COMMAND" "$@")
|
||||
|
||||
# Check the result
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Error running Docker container."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Command output:"
|
||||
echo "$OUTPUT"
|
||||
|
||||
echo "Job completed successfully."
|
||||
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
import csv
|
||||
import sys
|
||||
|
||||
# Define the expected number of columns
|
||||
EXPECTED_COLUMNS = 5
|
||||
|
||||
# Input CSV file
|
||||
if len(sys.argv) != 2:
|
||||
print("Usage: python validate_csv.py <csv_file>")
|
||||
sys.exit(1)
|
||||
|
||||
CSV_FILE = sys.argv[1]
|
||||
|
||||
# Check if the file exists
|
||||
try:
|
||||
with open(CSV_FILE, 'r') as file:
|
||||
reader = csv.reader(file)
|
||||
for line_number, row in enumerate(reader, start=1):
|
||||
if len(row) != EXPECTED_COLUMNS:
|
||||
print(f"Error: Line {line_number} has {len(row)} columns (expected {EXPECTED_COLUMNS}).")
|
||||
sys.exit(1)
|
||||
except FileNotFoundError:
|
||||
print(f"Error: File '{CSV_FILE}' not found.")
|
||||
sys.exit(1)
|
||||
|
||||
print(f"Validation passed: All rows have {EXPECTED_COLUMNS} columns.")
|
||||
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Define the expected number of columns
|
||||
EXPECTED_COLUMNS=5
|
||||
|
||||
# Input CSV file
|
||||
CSV_FILE=$1
|
||||
|
||||
# Check if the file is provided
|
||||
if [ -z "$CSV_FILE" ]; then
|
||||
echo "Usage: $0 <csv_file>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if the file exists
|
||||
if [ ! -f "$CSV_FILE" ]; then
|
||||
echo "Error: File '$CSV_FILE' not found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate each row in the CSV
|
||||
line_number=0
|
||||
while IFS= read -r line; do
|
||||
line_number=$((line_number + 1))
|
||||
|
||||
# Use a CSV parser to correctly handle quoted fields
|
||||
column_count=$(echo "$line" | awk -v FPAT='([^,]*|"[^"]*")' '{print NF}')
|
||||
|
||||
if [ "$column_count" -ne "$EXPECTED_COLUMNS" ]; then
|
||||
echo "Error: Line $line_number has $column_count columns (expected $EXPECTED_COLUMNS)."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
done < "$CSV_FILE"
|
||||
|
||||
echo "Validation passed: All rows have $EXPECTED_COLUMNS columns."
|
||||
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
target
|
||||
BUILD_NUMBER
|
||||
.idea
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
source "https://rubygems.org"
|
||||
|
||||
gem "json"
|
||||
gem "aws-sdk-s3"
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
target
|
||||
BUILD_NUMBER
|
||||
.idea
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1 @@
|
|||
{"header": {"version": 1, "copyright": "JamKazam 2015"}, "Events": [{"metronome": [{"ts": "-0:00:04:508", "action": "start", "bpm": 97.0, "vol": 100, "mode": "stream", "name": "beep", "meter": 1, "sound": "stream", "ticks": 8.0}, {"ts": "0:00:00:440", "action": "stop", "ticks": 8.0}]}, {"count_down": [{"ts": "-0:00:10:000", "duration": -10.0, "count": -10.0}]}, {"track_play": [{"ts": "0:00:00:000", "tracks": ["All"]}]}]}
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
# created via code using bpm/silence detection (bpm:97.154 offset:0.08 determined_start:0.3605890000000045)
|
||||
# authorative data bpm:97.0 authorative_start:0.36)
|
||||
prelude@10.0 #number of seconds before music starts
|
||||
metro_fin@00:00:00:440 bpm=97.0, ticks=8, pmode=stream, name=Beep, play=mono
|
||||
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
import json
|
||||
from pprint import pprint
|
||||
import random
|
||||
import sys, getopt
|
||||
import base64
|
||||
import tempfile
|
||||
import argparse
|
||||
from types import *
|
||||
import unicodedata
|
||||
import jparser
|
||||
|
||||
def process(argv):
|
||||
#print argv
|
||||
global parser
|
||||
parser = argparse.ArgumentParser(
|
||||
description='JamTrack Meta Processing Tool',
|
||||
epilog="Note: ")
|
||||
|
||||
parser.add_argument("-D", "--verbosity",
|
||||
help="increase output verbosity",
|
||||
action="store_true")
|
||||
|
||||
parser.add_argument("-i", "--ifile",metavar='JmepScript', type=str,
|
||||
help="The input JMEP script file (.jscr)",required=True,)
|
||||
|
||||
parser.add_argument("-o", "--ofile", metavar='Jmepfile', type=str,
|
||||
help="The output file (.jmep)",required=True,)
|
||||
|
||||
|
||||
|
||||
#parser.print_help()
|
||||
args = parser.parse_args(argv)
|
||||
#print args
|
||||
if args.verbosity:
|
||||
jparser.DEBUG_ENABLE = 1
|
||||
|
||||
#print args
|
||||
jp = jparser.JmepParser(args.ifile,0,args.ofile)
|
||||
if jp.processFile():
|
||||
jp.generateJson()
|
||||
else:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
#create json struct from info
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
random.seed()
|
||||
process(sys.argv[1:])
|
||||
|
|
@ -0,0 +1,370 @@
|
|||
import json
|
||||
from pprint import pprint
|
||||
import sys
|
||||
import os
|
||||
import keywords
|
||||
import metronome
|
||||
import math
|
||||
from timestamp import formatTimeStamp
|
||||
|
||||
JMEP_START_TIMESTAMP = (-100000)
|
||||
DEBUG_ENABLE = 0
|
||||
|
||||
def is_array(var):
|
||||
return isinstance(var, (list, tuple))
|
||||
|
||||
class JmepParser:
|
||||
outFile=''
|
||||
inFile=''
|
||||
ts = 0.0
|
||||
lineNumber = 0
|
||||
header = {}
|
||||
header['version'] = 1
|
||||
header['copyright'] = "JamKazam 2015"
|
||||
|
||||
metroList = []
|
||||
preludeList = []
|
||||
playList=[]
|
||||
syncList = []
|
||||
|
||||
def __init__(self, fileName, ts=JMEP_START_TIMESTAMP, outKeyFile='', lineNumber=0):
|
||||
self.inFile = fileName
|
||||
self.outFile = outKeyFile
|
||||
self.ts = ts
|
||||
self.lineNumber = lineNumber
|
||||
#self.processFile()
|
||||
|
||||
def is_number(self,s):
|
||||
try:
|
||||
float(s)
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
def ValidTsFormat(self,t):
|
||||
sgn = 0
|
||||
if t[0] == '+' or t[0] == '-':
|
||||
#is forward relative to current time
|
||||
if t[0] == '+':
|
||||
sgn = 1
|
||||
else:
|
||||
sgn = -1
|
||||
|
||||
t = t[1:]
|
||||
|
||||
if self.is_number(t):
|
||||
if sgn:
|
||||
tVal = float(t) * sgn + self.ts
|
||||
return (True, tVal)
|
||||
else:
|
||||
tVal = float(t)
|
||||
return (True, tVal)
|
||||
|
||||
#check if h:m:s:ms
|
||||
tt = t.split(':')
|
||||
if len(tt) != 4:
|
||||
print("Unknown timestamp format at ",self.inFile,":",self.lineNumber)
|
||||
return (False,0)
|
||||
|
||||
#covert to absolute number of seconds
|
||||
tv = float(tt[0])*3600 + float(tt[1])*60 + float(tt[2]) + float(tt[3])/1000.0
|
||||
if sgn:
|
||||
tVal = tv * sgn + self.ts
|
||||
return (True, tVal)
|
||||
else:
|
||||
tVal = tv
|
||||
return (True, tVal)
|
||||
|
||||
def extractNumval(self,nameVal,lineNumer):
|
||||
try:
|
||||
val = float(nameVal[1])
|
||||
return (True,val)
|
||||
except:
|
||||
print("Invalid name value format '",nameVal,"' at ",self.inFile,":",lineNumer)
|
||||
return (False,0)
|
||||
|
||||
def extraStringVal(self,nameVal,validator,lineNumber):
|
||||
if not is_array(nameVal):
|
||||
print("Invalid value in format '",nameVal,"' at ",self.inFile,":",lineNumber)
|
||||
return (False,'')
|
||||
str = nameVal[1].lower().strip()
|
||||
if validator:
|
||||
if str not in validator:
|
||||
print("Invalid value in format '",nameVal,"' at ",self.inFile,":",lineNumber)
|
||||
return (False,'')
|
||||
return (True,str)
|
||||
|
||||
def processTimestamp(self,kw):
|
||||
mylist = kw.split('@')
|
||||
t = mylist[1]
|
||||
(val,t) = self.ValidTsFormat(t)
|
||||
return (val,t)
|
||||
|
||||
def metronome(self, val, bStart, arg, lineNumber):
    """Handle a metro_start/metro_fin directive.

    val: the raw keyword token, e.g. 'metro_start@0:1:0:3'.
    bStart: True when the timestamp anchors the metronome's start
        (metro_start); False when it anchors the finish (metro_fin).
    arg: comma-separated name=value options (bpm, ticks, vol, len,
        pmode, name, play).
    lineNumber: source line for error reporting.
    Returns True on success, False on any parse/validation error.
    """
    # val is reused: first the keyword, then the ok-flag of the timestamp
    (val,t) = self.processTimestamp(val)
    if not val:
        return False

    # option defaults
    ticks = 0
    bpm = 0
    duration = 0
    vol = 100
    meter = 1
    name = 'default'
    pmode='stream'
    play='mono'
    #create tuples of (name,values)

    valid_names = ['default','sine', 'click','kick', 'beep','snare']
    valid_play = ['mono','left','right']
    valid_pmode = ['stream','distributed']

    args = arg.split(',')
    for nameVal in args:
        if '=' not in nameVal:
            print("Invalid name value format '",nameVal,"' at ",self.inFile,":",lineNumber)
            return False
        nv = nameVal.split("=")

        n = nv[0].lower().strip()
        b = True
        if n == 'bpm':
            (b,bpm) = self.extractNumval(nv,lineNumber)
        elif n == 'ticks':
            (b,ticks) = self.extractNumval(nv,lineNumber)
        elif n == 'vol':
            (b,vol) = self.extractNumval(nv,lineNumber)
        elif n == 'len':
            (b,duration) = self.extractNumval(nv,lineNumber)
        elif n == 'pmode':
            (b,pmode) = self.extraStringVal(nv,valid_pmode,lineNumber)
        elif n == 'name':
            (b,name) = self.extraStringVal(nv,valid_names,lineNumber)
        elif n == 'play':
            (b,play) = self.extraStringVal(nv,valid_play,lineNumber)
        else:
            print("Invalid name value format '",nameVal,"' at ",self.inFile,":",lineNumber)
            return False
        #check if argument was not a number
        if not b:
            return False

    # len and ticks both determine the duration; they are mutually exclusive
    if duration and ticks:
        print("Cannot specify both len and ticks '",self.inFile,":",lineNumber)
        return False

    if not bpm:
        print("BPM value must be specified '",self.inFile,":",lineNumber)
        return False

    # ticks at a given bpm fix the duration in seconds
    if ticks:
        duration = ticks*60/bpm

    tStart = 0
    tStop = 0

    # metro_start anchors the start; metro_fin anchors the stop
    if bStart:
        tStart = t
        tStop = t + duration
    else:
        tStart = t - duration
        tStop = t

    # NOTE: 'metronome' here is the imported module, not this method
    mn = metronome.Metronome(ticks, bpm, tStart, tStop, vol, pmode, name,play, meter)

    #check for time overlap
    for om in self.metroList:
        if mn.doesTimeOverlap(om):
            print("Metronome time range overlap '",nameVal,"' at ",self.inFile,":",lineNumber)
            return False

    self.metroList.append(mn)
    #newlist = sorted(self.metroList, key=lambda x: x.startTs, reverse=True)
    return True
def prelude(self, val, arg, lineNumber):
    """Handle 'prelude@N': record a count-down of N seconds before t=0.

    The timestamp is forced negative (time before the music starts).
    Replaces any previously recorded prelude.
    """
    (ok, t) = self.processTimestamp(val)
    if not ok:
        return False
    t = abs(t) * -1.0
    entry = {'ts': formatTimeStamp(t), 'duration': t, 'count': t}
    self.preludeList = [entry]
    return True
def play(self, val, arg, lineNumber):
    """Handle the 'play' directive: schedule playback of all tracks.

    Only time zero is supported; any other timestamp is rejected.
    """
    (val, t) = self.processTimestamp(val)
    if not val:
        return False
    if t:
        print("Play time must be 0 '", val, "' at ", self.inFile, ":", lineNumber)
        return False

    entry = {'ts': formatTimeStamp(0), 'tracks': ['All']}
    self.playList = entry
    return True
def syncTs(self, val, arg, lineNumber):
    """Handle 'sync@ts': append a sync event at the parsed timestamp."""
    ok, t = self.processTimestamp(val)
    if not ok:
        return False
    self.syncList.append({'ts': formatTimeStamp(t)})
    return True
def includeFile(self, val, arg, lineNumber):
    """Handle 'include@ts file': parse another jmep file at a timestamp.

    Returns True when the included file parsed cleanly, False otherwise.

    BUG FIX: the original ended with a bare `return` (None), so every
    include directive — even a successful one — made processLine treat
    the line as failed and abort the whole parse.
    """
    (ok, t) = self.processTimestamp(val)
    if not ok:
        return False

    p = JmepParser(arg, t, lineNumber)
    if not p.processFile():
        print("Error processing include file'", val, "' at ", self.inFile, ":", lineNumber)
        return False

    #TODO
    #merge the objects parsed from the include into this parser
    return True
def setTimeStamp(self, val, arg, lineNumber):
    """Handle 'ts@...': update the parser's reference timestamp."""
    ok, t = self.processTimestamp(val)
    if not ok:
        return False
    self.ts = t
    return True
def author(self, val, arg, lineNumber):
    """Handle 'author name=..., email=...': record header metadata.

    Unknown keys are reported but do not fail the line.
    """
    for pair in arg.split(','):
        if '=' not in pair:
            print("Invalid name value format '", pair, "' at ", self.inFile, ":", lineNumber)
            return False
        nv = pair.split("=")

        key = nv[0].lower().strip()
        if key == 'email':
            self.header['email'] = nv[1].strip()
        elif key == 'name':
            self.header['creator'] = nv[1].strip()
        else:
            print("Unknown name value format '", pair, "' at ", self.inFile, ":", lineNumber)
    return True
def generateJson(self):
    """Assemble the header and all parsed events into JSON and write it
    to self.outFile.

    Event groups are emitted in a fixed order: metronome (sorted by
    start time, one start+stop pair per metronome), count_down,
    track_play, sync. When no 'play' directive was seen, a default
    play-all-at-zero entry is generated.
    """
    if not self.playList:
        # load default: play all tracks at t=0
        pd = {}
        pd['ts'] = formatTimeStamp(0)
        pd['tracks'] = ['All']
        self.playList = pd

    data = {}
    data['header'] = self.header
    eventList = []

    if self.metroList:
        # each metronome contributes a start and a stop event, in
        # ascending start-time order
        md = []
        for m in sorted(self.metroList, key=lambda x: x.startTs):
            md.append(m.generateStart())
            md.append(m.generateStop())
        eventList.append({'metronome': md})

    if self.preludeList:
        eventList.append({'count_down': self.preludeList})
    if self.playList:
        eventList.append({'track_play': [self.playList]})
    if self.syncList:
        eventList.append({'sync': self.syncList})

    data['Events'] = eventList

    jdata = json.dumps(data)
    pprint(jdata)

    # BUG FIX: use a context manager so the handle is closed even if
    # the write raises (the original open/write/close leaked on error).
    with open(self.outFile, 'w') as fo:
        fo.write(jdata)
def processLine(self, str, lineNum):
    """Dispatch one script line to its keyword handler.

    str: raw line text; '#' comments are stripped here.
    lineNum: 1-based line number for error reporting.
    Returns True to continue parsing, False to abort.
    """
    # remove comment part from string
    if '#' in str:
        mylist = str.split('#')
        str = mylist[0].strip();
    #split string into name value pairs
    # NOTE(review): the result of strip() is discarded — this line is a no-op
    str.strip()
    #print str
    if not str:
        #empty string
        return True
    #pick of the first word - this is the key
    keyword = str.partition(' ')[0]
    #print 'keyword=',keyword

    # NOTE(review): replace() removes every occurrence of the keyword text,
    # not just the leading one — fragile if the same word appears in the args
    args = str.replace(keyword,'')
    keyword = keyword.lower()
    b = False
    # NOTE(review): dispatch uses substring containment ('in'), so the order
    # of these checks matters (e.g. 'ts' or 'sync' could match other tokens)
    if keywords.METRO_START in keyword:
        b = self.metronome(keyword,True,args,lineNum)
    elif keywords.METRO_FIN in keyword:
        b= self.metronome(keyword,False,args,lineNum)
    elif keywords.PRELUDE in keyword:
        b = self.prelude(keyword,args,lineNum)
    elif keywords.PLAY_FILE in keyword:
        return self.play(keyword,args,lineNum)
    elif keywords.EXPLICIT_TIMESTAMP in keyword:
        b =self.setTimeStamp(keyword,args,lineNum)
    elif keywords.AUTHOR in keyword:
        b = self.author(keyword,args,lineNum)
    elif keywords.EXPLICIT_SYNC in keyword:
        b = self.syncTs(keyword,args,lineNum)
    elif keywords.INCLUDE_FILE in keyword:
        b = self.includeFile(keyword,args,lineNum)
    else:
        print("Unknown keyword '",keyword,"' at ",self.inFile,":",lineNum)
        return False
    return b
def processFile(self):
    """Read self.inFile and feed every line to processLine.

    Returns True when the whole file parsed, False on the first
    failing line.
    """
    with open(self.inFile) as f:
        lines = [ln.strip('\n') for ln in f.readlines()]

    # line numbers are 1-based for error reporting
    for linenum, line in enumerate(lines, start=1):
        self.lineNumber = linenum
        if DEBUG_ENABLE:
            print(linenum, line)
        if not self.processLine(line, linenum):
            return False

    return True
|
|
@ -0,0 +1,8 @@
|
|||
# Directive keywords recognized by the JMEP parser. processLine matches
# them by substring containment against the first word of each line.
METRO_START = 'metro_start'   # metronome anchored at its start time
METRO_FIN = 'metro_fin'       # metronome anchored at its finish time
EXPLICIT_TIMESTAMP = 'ts'     # set the parser's reference timestamp
AUTHOR ='author'              # author/email header metadata
PRELUDE = 'prelude'           # count-down seconds before the music starts
INCLUDE_FILE = 'include'      # include another jmep file
PLAY_FILE = 'play'            # schedule track playback
EXPLICIT_SYNC = 'sync'        # emit a sync event
||||
|
|
@ -0,0 +1,53 @@
|
|||
import math
|
||||
from timestamp import formatTimeStamp
|
||||
|
||||
class Metronome:
    """One metronome region: a bpm/tick pattern active over [startTs, stopTs].

    Instances serialize themselves as paired start/stop event dicts for
    the generated JMEP JSON.
    """

    # class-level defaults (every field is also set per-instance in __init__)
    bpm = 0
    volume = 0
    meter = 1
    name = ''
    play = ''
    pmode = ''
    startTs = 0
    stopTs = 0
    ticks = 0

    def __init__(self, ticks, bpm, tStart, tStop, vol, pmode, name="default", play='mono', meter=1):
        self.ticks = ticks
        self.bpm = bpm
        self.startTs = tStart
        self.stopTs = tStop
        self.volume = vol
        self.meter = meter
        self.pmode = pmode
        self.name = name
        self.play = play

    def doesTimeOverlap(self, other):
        """True when this metronome's time range intersects other's."""
        earlier, later = (self, other) if self.startTs < other.startTs else (other, self)
        return earlier.stopTs > later.startTs

    def generateStart(self):
        """Event dict marking where this metronome begins."""
        # NOTE(review): 'sound' mirrors pmode rather than name — this matches
        # the sample generated JSON in the repo, but confirm it is intended.
        return {
            'ts': formatTimeStamp(self.startTs),
            'action': 'start',
            'bpm': self.bpm,
            'vol': self.volume,
            'mode': self.pmode,
            'name': self.name,
            'meter': self.meter,
            'sound': self.pmode,
            'ticks': self.ticks,
        }

    def generateStop(self):
        """Event dict marking where this metronome ends."""
        return {
            'ts': formatTimeStamp(self.stopTs),
            'action': 'stop',
            'ticks': self.ticks,
        }

    def setMeter(self, meter):
        """Override the meter after construction."""
        self.meter = meter
|
|
@ -0,0 +1 @@
|
|||
{"header": {"version": 1, "email": "ant@jamkazam.com", "copyright": "JamKazam 2015", "creator": "\"Anthony RockStar\""}, "Events": [{"metronome": [{"sound": "stream", "name": "click", "vol": 100, "bpm": 99.0, "ts": "-0:00:02:424", "meter": 1, "mode": "stream", "action": "start"}, {"action": "stop", "ts": "0:00:00:000"}, {"sound": "stream", "name": "default", "vol": 100, "bpm": 130.0, "ts": "0:01:00:003", "meter": 1, "mode": "stream", "action": "start"}, {"action": "stop", "ts": "0:01:02:003"}, {"sound": "stream", "name": "default", "vol": 100, "bpm": 160.0, "ts": "0:13:18:500", "meter": 1, "mode": "stream", "action": "start"}, {"action": "stop", "ts": "0:13:20:000"}, {"sound": "stream", "name": "default", "vol": 100, "bpm": 150.0, "ts": "0:17:30:002", "meter": 1, "mode": "stream", "action": "start"}, {"action": "stop", "ts": "0:17:31:643"}]}, {"count_down": [{"duration": -7.0, "count": -7.0, "ts": "-0:00:07:000"}]}, {"track_play": [{"tracks": ["All"], "ts": "0:00:00:000"}]}, {"sync": [{"ts": "-0:00:07:000"}, {"ts": "0:00:00:000"}]}]}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
sync@-7 #optional
|
||||
author name = "Anthony RockStar", email=ant@jamkazam.com
|
||||
prelude@7.0 #number of seconds before music starts
|
||||
play@0 all
|
||||
metro_fin@0 bpm=99 , ticks= 4, pmode=stream , name=click, play=left # finish playing at track timestamp
|
||||
#include@1000 "other file.gmep" #include a file at particular timestamp
|
||||
sync@0 #optional
|
||||
metro_start@0:1:0:3 bpm=130, len = 2.0 , pmode=stream # start time track play timestamp. Play for 2 secs
|
||||
ts@1000 #set the media timestamp explicitly
|
||||
metro_start@+0:0:50:3 bpm=150, ticks = 4.1 , pmode=stream # start metronome at time relative to last explicit ts
|
||||
metro_fin@-200 bpm=160 ,ticks= 4 , pmode=stream # finish playing at track relative to last explicit timestamp
|
||||
|
|
@ -0,0 +1 @@
|
|||
{"header": {"version": 1, "email": "ant@jamkazam.com", "copyright": "JamKazam 2015", "creator": "\"Anthony RockStar\""}, "Events": [{"metronome": [{"sound": "stream", "name": "beep", "vol": 100, "bpm": 91.0, "ts": "-0:00:02:637", "meter": 1, "mode": "stream", "action": "start"}, {"action": "stop", "ts": "0:00:00:000"}]}, {"count_down": [{"duration": -10.0, "count": -10.0, "ts": "-0:00:10:000"}]}, {"track_play": [{"tracks": ["All"], "ts": "0:00:00:000"}]}]}
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
author name = "Anthony RockStar", email=ant@jamkazam.com
|
||||
prelude@10.0 #number of seconds before music starts
|
||||
play@0 all
|
||||
metro_fin@0 bpm=91 , ticks= 4, pmode=stream , name=Beep, play=mono
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
import math
|
||||
__author__ = 'pwalker'
|
||||
|
||||
def formatTimeStamp(sec):
    """Render seconds as the JMEP timestamp string '[-]H:MM:SS:mmm'.

    Negative inputs keep a leading '-'; fractional seconds become the
    millisecond field (truncated by the integer formatting).
    """
    sign = ''
    if sec < 0:
        sign = '-'
        sec = abs(sec)
    millis = (sec - math.floor(sec)) * 1000
    minutes, seconds = divmod(sec, 60)
    hours, minutes = divmod(minutes, 60)
    return "%s%i:%02i:%02i:%03i" % (sign, hours, minutes, seconds, millis)
|
|
@ -0,0 +1,55 @@
|
|||
def app_config
|
||||
klass = Class.new do
|
||||
def aws_bucket
|
||||
if ENV["AWS_BUCKET"].nil?
|
||||
raise "set AWS_BUCKET env var"
|
||||
end
|
||||
ENV["AWS_BUCKET"]
|
||||
end
|
||||
|
||||
def aws_access_key_id
|
||||
if ENV["AWS_ACCESS_KEY_ID"].nil?
|
||||
raise "set AWS_ACCESS_KEY_ID env var"
|
||||
end
|
||||
ENV["AWS_ACCESS_KEY_ID"]
|
||||
end
|
||||
|
||||
def aws_secret_access_key
|
||||
if ENV["AWS_SECRET_ACCESS_KEY"].nil?
|
||||
raise "set AWS_SECRET_ACCESS_KEY env var"
|
||||
end
|
||||
ENV["AWS_SECRET_ACCESS_KEY"]
|
||||
end
|
||||
|
||||
def aws_bucket_public
|
||||
if ENV["AWS_BUCKET_PUBLIC"].nil?
|
||||
raise "set AWS_BUCKET_PUBLIC env var"
|
||||
end
|
||||
ENV["AWS_BUCKET_PUBLIC"]
|
||||
end
|
||||
|
||||
def ffmpeg_path
|
||||
if ENV["FFMPEG_PATH"].nil?
|
||||
raise "set FFMPEG_PATH env var"
|
||||
end
|
||||
ENV["FFMPEG_PATH"]
|
||||
end
|
||||
|
||||
def ffmpeg_path_mp3
|
||||
if ENV["FFMPEG_PATH_MP3"].nil?
|
||||
raise "set FFMPEG_PATH_MP3 env var"
|
||||
end
|
||||
ENV["FFMPEG_PATH_MP3"]
|
||||
end
|
||||
|
||||
def jmep_dir
|
||||
if ENV["JMEP_DIR"].nil?
|
||||
raise "set JMEP_DIR env var"
|
||||
end
|
||||
ENV["JMEP_DIR"]
|
||||
end
|
||||
end
|
||||
|
||||
klass.new
|
||||
end
|
||||
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
require 'json'
|
||||
require 'tempfile'
|
||||
require 'open3'
|
||||
require 'fileutils'
|
||||
require 'open-uri'
|
||||
|
||||
module JamRuby

  # Interact with external python tools to create jmep json
  #
  # Shells out to jmepgen.py (in APP_CONFIG.jmep_dir) with a temp input
  # file and returns the JSON it writes.
  class JmepManager

    # NOTE(review): Logging comes from the 'logging' gem and must be
    # loaded/configured by the host app before this file is required.
    @@log = Logging.logger[JmepManager]

    class << self

      # Run the jmep generator over jmep_text and return the generated
      # JSON string, or nil when jmep_text is blank (blank? is from
      # ActiveSupport). Raises ArgumentError when python exits non-zero.
      def execute(jmep_text)

        json = nil

        if jmep_text.blank?
          return nil
        end

        py_root = APP_CONFIG.jmep_dir
        Dir.mktmpdir do |tmp_dir|

          output_json = File.join(tmp_dir, "jmep.json")
          input_text = File.join(tmp_dir, "jmep.txt")

          # put JMEP text into input file
          File.open(input_text, 'w') { |file| file.write(jmep_text) }

          py_file = File.join(py_root, "jmepgen.py")
          @@log.info "Executing python source in #{py_file}, outputting to #{output_json})"

          # From http://stackoverflow.com/questions/690151/getting-output-of-system-calls-in-ruby/5970819#5970819:
          cli = "python #{py_file} -i '#{input_text}' -o '#{output_json}'"
          Open3.popen3(cli) do |stdin, stdout, stderr, wait_thr|
            pid = wait_thr.pid
            # NOTE(review): waiting for exit before draining stdout/stderr can
            # deadlock if the child writes more than a pipe buffer; reads are
            # also capped at 1000 bytes. Confirm jmepgen's output stays small.
            exit_status = wait_thr.value
            err = stderr.read(1000)
            out = stdout.read(1000)

            raise ArgumentError, "#{out} #{err}" if exit_status != 0

            json = File.read(output_json)
          end
        end

        json
      end

    end
  end
end
||||
|
|
@ -0,0 +1,16 @@
|
|||
module JamRuby
  # Mixin exposing the process-wide APP_CONFIG object to instances
  # (see the app_config factory that builds it from ENV).
  module AppConfig
    extend ActiveSupport::Concern

    included do
    end

    module ClassMethods

    end

    # The global configuration object.
    def app_config
      APP_CONFIG
    end
  end
end
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,100 @@
|
|||
require "json"
|
||||
require "pg"
|
||||
#require "protected_attributes"
|
||||
require "active_record"
|
||||
require "logging"
|
||||
|
||||
#$LOAD_PATH.unshift(File.expand_path('lib', __dir__))
|
||||
|
||||
require_relative "../../app_config"
|
||||
|
||||
require_relative "./app_config"
|
||||
require_relative "./s3_manager_mixin"
|
||||
require_relative "./s3_public_manager_mixin"
|
||||
require_relative "./module_overrides"
|
||||
require_relative "./s3_util"
|
||||
require_relative "./s3_manager"
|
||||
require_relative "./profanity"
|
||||
require_relative "../jmep_manager"
|
||||
require_relative "../models/genre"
|
||||
require_relative "../models/instrument"
|
||||
require_relative "../models/jam_track"
|
||||
require_relative "../models/genre_jam_track"
|
||||
require_relative "../models/jam_track_track"
|
||||
require_relative "../models/jam_track_file"
|
||||
require_relative "../models/jam_track_licensor"
|
||||
require_relative "./jam_track_lambda_importer"
|
||||
|
||||
|
||||
APP_CONFIG = app_config
|
||||
|
||||
# invoked by Lambda
|
||||
# Lambda entry point: invoked by S3 batch operations with one task.
def handler(event:, context:)
  # BUG FIX: import_jamtrack is an instance method of
  # JamRuby::JamTrackHandlerImporter, not a top-level function — the bare
  # call raised NameError on every invocation.
  JamRuby::JamTrackHandlerImporter.new.import_jamtrack(event)
end
||||
|
||||
require "cgi"

module JamRuby

  # Handles one S3 batch-operations task: kicks off the jamtrack import
  # for the referenced zip and reports a per-task result back to S3 batch.
  class JamTrackHandlerImporter
    def initialize
      puts "JamTrackHandlerImporter initialized"
    end

    # event: the S3 batch operations invocation payload (always one task).
    # Returns the response hash S3 batch operations expects.
    def import_jamtrack(event)
      # Parse job parameters from Amazon S3 batch operations
      invocation_id = event["invocationId"]
      invocation_schema_version = event["invocationSchemaVersion"]

      results = []

      task = event["tasks"][0]
      task_id = task["taskId"]

      puts("start invocation_id #{invocation_id} task_id #{task_id}")
      puts("TASK #{task}")

      # BUG FIX: URI.unescape was removed in Ruby 3.0; CGI.unescape is the
      # replacement used in AWS's batch-operations examples (also decodes
      # '+' as space, which S3 batch manifests use).
      obj_key = CGI.unescape(task["s3Key"])
      obj_version_id = task["s3VersionId"]
      bucket_name = task["s3BucketArn"].split(":")[-1]

      puts("Got task: create manifest just for #{obj_version_id} and bucket #{bucket_name} from object #{obj_key}.")

      zip_key = obj_key

      # Configure ActiveRecord connection from the environment.
      ActiveRecord::Base.establish_connection(
        adapter: "postgresql",
        host: ENV["DB_HOST"],
        username: ENV["DB_USER"],
        password: ENV["DB_PASS"],
        database: ENV["DB_NAME"]
      )

      # BUG FIX: result_code/result_string were always nil and a stray bare
      # `importer` reference raised NameError. Run the import and report
      # success/failure to S3 batch instead of crashing the invocation.
      begin
        JamRuby::JamTrackLambdaImporter.storage_format = "Tency"
        JamRuby::JamTrackLambdaImporter.dry_run
        result_code = "Succeeded"
        result_string = "imported #{zip_key}"
      rescue StandardError => e
        result_code = "PermanentFailure"
        result_string = e.message
      end

      # add one result to the list of actions taken (always exactly 1 task)
      results.append(
        {
          "taskId": task_id,
          "resultCode": result_code,
          "resultString": result_string,
        }
      )

      {
        "invocationSchemaVersion": invocation_schema_version,
        "treatMissingKeysAs": "PermanentFailure",
        "invocationId": invocation_id,
        "results": results,
      }
    end
  end
end
||||
|
|
@ -0,0 +1,11 @@
|
|||
require 'json'
|
||||
|
||||
class String
  # True when the receiver parses as a JSON document.
  def is_json?
    JSON.parse(self)
    true
  # BUG FIX: the original returned !!JSON.parse(self), which misreported
  # the valid documents "null" and "false" as non-JSON; the bare rescue
  # is also narrowed to parse failures.
  rescue JSON::ParserError, TypeError
    false
  end
end
||||
|
|
@ -0,0 +1,39 @@
|
|||
module JamRuby

  # Profanity word-list lookup backed by config/profanity.yml.
  class Profanity
    @@dictionary_file = File.join(File.dirname(__FILE__), '../../..', 'config/profanity.yml')
    @@dictionary = nil

    # Lazily-loaded word list.
    def self.dictionary
      @@dictionary ||= load_dictionary
    end

    # NOTE(review): assumes 'yaml' has been required by the host app.
    def self.load_dictionary
      YAML.load_file(@@dictionary_file)
    end

    # True when the word (case-insensitive) is in the dictionary.
    def self.check_word(word)
      dictionary.include?(word.downcase)
    end

    # True when any word token in text is profane; nil-safe.
    def self.is_profane?(text)
      return false if text.nil?

      text.split(/\W+/).any? { |word| check_word(word) }
    end
  end

end
|
||||
# This needs to be outside the module to work.
|
||||
class NoProfanityValidator < ActiveModel::EachValidator
|
||||
# implement the method called during validation
|
||||
def validate_each(record, attribute, value)
|
||||
record.errors[attribute] << 'cannot contain profanity' if JamRuby::Profanity.is_profane?(value)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -0,0 +1,174 @@
|
|||
require 'aws-sdk-s3'
|
||||
require 'active_support/all'
|
||||
require 'openssl'
|
||||
|
||||
module JamRuby

  # Thin wrapper around the AWS S3 SDK for a single bucket.
  #
  # NOTE(review): this class mixes the modern aws-sdk-s3 resource API
  # (s3_bucket.object / #upload_file) with aws-sdk v1 collection calls
  # (s3_bucket.objects[...], #with_prefix, #multipart_uploads, #as_tree)
  # that do not exist in aws-sdk-s3 v2/v3 and will raise if invoked.
  # The v1-style methods are kept for interface compatibility and are
  # flagged inline — confirm before relying on them.
  class S3Manager

    @@def_opts = { :expires => 3600 * 24, :secure => true } # 24 hours from now

    S3_PREFIX = 's3://'

    # aws_bucket: name of the bucket to operate on (required).
    # endpoint: optional alternate S3 endpoint (e.g. a local emulator);
    # nil means the default AWS endpoint in us-east-1.
    def initialize(aws_bucket, endpoint = nil)
      # BUG FIX: replaced a leftover debugging string and a nonsense
      # raise message with a descriptive failure.
      raise "S3Manager requires a bucket name (aws_bucket is nil)" if aws_bucket.nil?

      @aws_bucket = aws_bucket
      @s3 =
        if endpoint.nil?
          Aws::S3::Client.new(region: 'us-east-1')
        else
          Aws::S3::Client.new(:endpoint => endpoint)
        end
      @aws_key = nil
      @aws_endpoint = endpoint
    end

    # s3://bucket/key form of a key.
    def s3_url(filename)
      "#{S3_PREFIX}#{@aws_bucket}/#{filename}"
    end

    # True when the string is already an s3:// URL.
    def s3_url?(filename)
      filename.start_with? S3_PREFIX
    end

    # Plain (unsigned) HTTP(S) URL for a key.
    def url(filename, options = @@def_opts)
      "http#{options[:secure] ? "s" : ""}://s3.amazonaws.com/#{@aws_bucket}/#{filename}"
    end

    # XXX: the client can not support HTTPS atm!!! AGH Change the :url to => https://s3.jamkazam.com when client supports https
    # is_native_client check?
    # Builds a signed PUT descriptor for one part of a multipart upload.
    # NOTE(review): @aws_secret is never assigned anywhere in this class, so
    # the HMAC is computed with a nil secret — this cannot produce a valid
    # signature as written; confirm where the secret should come from.
    def upload_sign(filename, content_md5, part_number, upload_id)
      hdt = http_date_time
      str_to_sign = "PUT\n#{content_md5}\n#{content_type}\n#{hdt}\n/#{@aws_bucket}/#{filename}?partNumber=#{part_number}&uploadId=#{upload_id}"
      signature = Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha1'), @aws_secret, str_to_sign)).chomp
      { :datetime => hdt,
        :md5 => content_md5,
        :url => "http://s3.amazonaws.com/#{@aws_bucket}/#{filename}?partNumber=#{part_number}&uploadId=#{upload_id}",
        :authorization => "AWS #{@aws_key}:#{signature}"
      }
    end

    # v1 API — see class NOTE.
    def sign_url(key, options = @@def_opts, operation = :read)
      s3_bucket.objects(key).url_for(operation, options).to_s
    end

    # v1 API — see class NOTE.
    def public_url(key, options = @@def_opts)
      s3_bucket.objects(key).public_url(options).to_s
    end

    # v1 API — see class NOTE.
    def presigned_post(key, options = @@def_opts)
      s3_bucket.objects(key).presigned_post(options)
    end

    # v1 API — see class NOTE.
    def multipart_upload_start(upload_filename)
      s3_bucket.objects[upload_filename].multipart_upload.id
    end

    # v1 API — see class NOTE.
    def multipart_upload_complete(upload_filename, upload_id)
      s3_bucket.objects[upload_filename].multipart_uploads[upload_id].complete(:remote_parts)
    end

    # v1 API — see class NOTE.
    def multipart_upload_abort(upload_filename, upload_id)
      s3_bucket.objects[upload_filename].multipart_uploads[upload_id].abort
    end

    # v1 API — see class NOTE.
    def multiple_upload_find_part(upload_filename, upload_id, part)
      s3_bucket.objects[upload_filename].multipart_uploads[upload_id].parts[part]
    end

    # True when the key exists (v1 API — see class NOTE).
    # BUG FIX: this method was defined twice in the original; the
    # identical duplicate further down has been removed.
    def exists?(filename)
      s3_bucket.objects[filename].exists?
    end

    # v1 API — see class NOTE.
    def delete(filename)
      s3_bucket.objects[filename].delete
    end

    # Upload a local file to `key` (modern resource API).
    def upload(key, filename, options={})
      object = s3_bucket.object(key)
      puts "upload key #{object} #{key} from #{filename}"
      object.upload_file(filename, options)
    end

    # Upload with a long cache expiry (v1 API — see class NOTE).
    def cached_upload(key, filename, options={})
      options[:file] = filename
      # BUG FIX: merge returns a new hash; the original discarded the
      # result, so the expiry was never applied.
      options = options.merge({expires: 5.years.from_now})
      s3_bucket.objects(key).write(filename, options)
    end

    # v1 API — see class NOTE.
    def delete_folder(folder)
      s3_bucket.objects.with_prefix(folder).delete_all
    end

    # Stream an object down into a local file.
    # NOTE(review): Aws::S3::Object#get in v2/v3 does not yield chunks to a
    # block like this — confirm against the installed SDK version.
    def download(key, filename)
      object = s3_bucket.object(key)
      File.open(filename, "wb") do |f|
        object.get do |data|
          f.write(data)
        end
      end
    end

    # Read a whole object into a string (v1 API — see class NOTE).
    def read_all(key)
      s = StringIO.new
      s3_bucket.objects(key).read do |data|
        s.write(data)
      end
      s.string
    end

    # All keys under the given prefix.
    def list_files(prefix)
      list = []
      s3_bucket.objects(prefix: prefix).map(&:key).each do |key|
        list << key
      end

      puts "list_files: #{prefix}, LIST #{list}"
      list
    end

    # Immediate sub-"directories" under a prefix (v1 API — see class NOTE).
    def list_directories(prefix = nil)
      tree = s3_bucket.as_tree(prefix: prefix)
      tree.children.select(&:branch?).collect(&:prefix)
    end

    # v1 API — see class NOTE.
    def object(filename)
      s3_bucket.objects[filename]
    end

    # v1 API — see class NOTE.
    def length(filename)
      s3_bucket.objects[filename].content_length
    end

    private

    # Resource-API bucket handle, honoring a custom endpoint when set.
    def s3_bucket
      if @aws_endpoint.nil?
        s3 = Aws::S3::Resource.new(region: 'us-east-1')
      else
        s3 = Aws::S3::Resource.new(region: 'us-east-1', endpoint: @aws_endpoint)
      end
      s3.bucket(@aws_bucket)
    end

    # Content type used when signing uploads.
    def content_type
      "audio/ogg"
    end

    # RFC-1123-style date header value for request signing.
    def http_date_time
      Time.now.strftime("%a, %d %b %Y %H:%M:%S %z")
    end

  end
end
||||
|
|
@ -0,0 +1,19 @@
|
|||
module JamRuby
  # Mixin providing a memoized S3Manager at both class and instance level.
  module S3ManagerMixin
    extend ActiveSupport::Concern
    include AppConfig

    included do
    end

    module ClassMethods
      # Memoized S3Manager for the requested bucket (explicit :bucket wins,
      # then the public or private configured bucket).
      # BUG FIX: S3Manager.new takes (bucket, endpoint = nil); the original
      # passed key/secret as extra arguments, raising ArgumentError.
      # Credentials come from the AWS SDK default chain.
      def s3_manager(options = { :bucket => nil, :public => false })
        bucket = options[:bucket] ? options[:bucket] : (options[:public] ? APP_CONFIG.aws_bucket_public : APP_CONFIG.aws_bucket)
        @s3_manager ||= S3Manager.new(bucket)
      end
    end

    # Instance-level variant of the same accessor (same arity fix as above).
    def s3_manager(options = { :bucket => nil, :public => false })
      bucket = options[:bucket] ? options[:bucket] : (options[:public] ? app_config.aws_bucket_public : app_config.aws_bucket)
      @s3_manager ||= S3Manager.new(bucket)
    end
  end
end
||||
|
|
@ -0,0 +1,17 @@
|
|||
module JamRuby
  # Mixin providing a memoized S3Manager bound to the public bucket.
  module S3PublicManagerMixin
    extend ActiveSupport::Concern
    include AppConfig

    included do
    end

    module ClassMethods

    end

    # Memoized S3Manager for the configured public bucket.
    def s3_public_manager()
      @s3_public_manager ||= S3Manager.new(app_config.aws_bucket_public)
    end
  end
end
||||
|
|
@ -0,0 +1,27 @@
|
|||
require 'aws-sdk-s3'
|
||||
require 'active_support/all'
|
||||
|
||||
module JamRuby
  # Class-level S3 helpers operating on arbitrary buckets.
  #
  # NOTE(review): built on the aws-sdk v1 collection API
  # (@@s3.buckets[...]), which does not exist on Aws::S3::Client from
  # aws-sdk-s3 v2/v3 — these methods will raise if called without being
  # ported to the resource API.
  class S3Util
    @@def_opts = { :expires => 3600 * 24, :secure => true } # 24 hours from now
    @@s3 = Aws::S3::Client.new(:access_key_id => ENV['AWS_KEY'], :secret_access_key => ENV['AWS_SECRET'])

    # Signed, time-limited read URL for bucket/path.
    def self.sign_url(bucket, path, options = @@def_opts)
      bucket_gen = @@s3.buckets[bucket]
      "#{bucket_gen.objects[path].url_for(:read, options).to_s}"
    end

    # Plain (unsigned) HTTP(S) URL for a key.
    def self.url(aws_bucket, filename, options = @@def_opts)
      "http#{options[:secure] ? "s" : ""}://s3.amazonaws.com/#{aws_bucket}/#{filename}"
    end

    # Move an object within a bucket.
    def self.move(aws_bucket, source, destination)
      # BUG FIX: move_to takes the destination as an argument; the original
      # `move_to[destination]` called move_to with no args and then indexed
      # into its result.
      @@s3.buckets[aws_bucket].objects[source].move_to(destination)
    end

    # Delete an object from a bucket.
    def self.delete(aws_bucket, path)
      @@s3.buckets[aws_bucket].objects[path].delete()
    end
  end
end
||||
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
module JamRuby
  # Musical genre lookup table; joins to music sessions, recordings,
  # teachers and jam tracks.
  class Genre < ActiveRecord::Base

    self.primary_key = 'id'

    # bands
    # has_many :genre_players, class_name: "JamRuby::GenrePlayer"
    # has_many :bands, class_name: "JamRuby::Band", through: :genre_players, condition: ['player_type = ?', 'JamRuby::Band']
    # has_many :users, class_name: "JamRuby::User", through: :genre_players, condition: ['player_type = ?', 'JamRuby::User']


    # music sessions
    has_many :music_sessions, :class_name => "JamRuby::MusicSession"

    # genres
    has_and_belongs_to_many :recordings, :class_name => "JamRuby::Recording", :join_table => "recordings_genres"

    # teachers
    has_many :teachers, :class_name => "JamRuby::Teacher", :through => :teachers_genres
    has_many :teachers_genres, :class_name => "JamRuby::TeacherGenre"

    # jam tracks
    has_many :genres_jam_tracks, :class_name => "JamRuby::GenreJamTrack", :foreign_key => "genre_id"
    # NOTE(review): :source => :genre on a through to jam_tracks looks
    # inverted (one would expect :source => :jam_track) — confirm.
    has_many :jam_tracks, :through => :genres_jam_tracks, :class_name => "JamRuby::JamTrack", :source => :genre

    # Display name.
    def to_s
      description
    end

    # Genres that have at least one jam track, ordered by description.
    def self.jam_track_list
      sql = "SELECT DISTINCT genre_id FROM genres_jam_tracks WHERE genre_id IS NOT NULL"
      Genre.select("DISTINCT(genres.id), genres.*")
        .where("genres.id IN (#{sql})")
        .order('genres.description ASC, genres.id')
    end

  end
end
||||
|
|
@ -0,0 +1,11 @@
|
|||
module JamRuby
  # Join model between genres and jam tracks (table: genres_jam_tracks).
  class GenreJamTrack < ActiveRecord::Base

    self.table_name = 'genres_jam_tracks'

    #attr_accessible :jam_track_id, :genre_id

    belongs_to :jam_track, class_name: 'JamRuby::JamTrack', inverse_of: :genres_jam_tracks
    belongs_to :genre, class_name: 'JamRuby::Genre', inverse_of: :genres_jam_tracks
  end
end
||||
|
|
@ -0,0 +1,70 @@
|
|||
module JamRuby
  # Instrument lookup table; joins to musicians, tracks, sessions and
  # teachers, and maps instrument names to client icon asset names.
  class Instrument < ActiveRecord::Base

    # instrument name (lowercase) => icon asset name
    MAP_ICON_NAME = {
      "accordion" => "accordion",
      "acoustic guitar" => "acoustic_guitar",
      "banjo" => "banjo",
      "bass guitar" => "bass_guitar",
      "cello" => "cello",
      "clarinet" => "clarinet",
      "computer" => "computer",
      "default" => "default",
      "drums" => "drums",
      "electric guitar" => "electric_guitar",
      "euphonium" => "euphonium",
      "flute" => "flute",
      "french horn" => "french_horn",
      "harmonica" => "harmonica",
      "keyboard" => "keyboard",
      "mandolin" => "mandolin",
      "oboe" => "oboe",
      "other" => "other",
      "piano" => "piano",
      "saxophone" => "saxophone",
      "trombone" => "trombone",
      "trumpet" => "trumpet",
      "tuba" => "tuba",
      # NOTE(review): icon name is spelled "ukelele" while the key is
      # "ukulele" — confirm this matches the client asset name.
      "ukulele" => "ukelele",
      "upright bass" => "upright_bass",
      "double bass" => "double_bass",
      "viola" => "viola",
      "violin" => "violin",
      "voice" => "voice"
    }

    self.primary_key = 'id'

    # users
    has_many :musician_instruments, :class_name => "JamRuby::MusicianInstrument"
    has_many :players, :through => :musician_instruments
    has_many :tracks, :class_name => "JamRuby::Track", :inverse_of => :instrument
    has_many :recorded_tracks, :class_name => "JamRuby::RecordedTrack", :inverse_of => :instrument

    # music sessions
    has_and_belongs_to_many :music_sessions, :class_name => "JamRuby::ActiveMusicSession", :join_table => "genres_music_sessions"

    # teachers
    has_many :teachers, :class_name => "JamRuby::Teacher", through: :teachers_instruments
    has_many :teachers_instruments, class_name: "JamRuby::TeacherInstrument"

    # Instruments shown in standard pickers (popularity > 0).
    def self.standard_list
      return Instrument.where('instruments.popularity > 0').order('instruments.description ASC')
    end

    # Instruments that appear on at least one jam track track.
    def self.jam_track_list
      sql = "SELECT DISTINCT instrument_id FROM jam_track_tracks WHERE instrument_id IS NOT NULL"
      Instrument.where("instruments.id IN (#{sql})")
        .order('instruments.description ASC')
    end

    # Icon asset name for this instrument.
    # NOTE(review): the map is keyed by lowercase instrument names but this
    # looks up by the primary key `id` — unless ids are those name strings,
    # this always returns nil; likely should key on description.downcase.
    def icon_name
      MAP_ICON_NAME[self.id]
    end

    # Display name.
    def to_s
      description
    end

  end
end
||||
|
|
@ -0,0 +1,680 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
module JamRuby
|
||||
class JamTrack < ActiveRecord::Base
|
||||
include JamRuby::S3ManagerMixin
|
||||
|
||||
TIME_SIGNATURES = %w{4/4 3/4 2/4 6/8 5/8'}
|
||||
STATUS = %w{Staging Production Retired}
|
||||
RECORDING_TYPE = %w{Cover Original}
|
||||
PRO = %w{ASCAP BMI SESAC}
|
||||
SALES_REGION = ['United States', 'Worldwide']
|
||||
|
||||
PRODUCT_TYPE = 'JamTrack'
|
||||
|
||||
@@log = Logging.logger[JamTrack]
|
||||
|
||||
attr_accessor :uploading_preview
|
||||
# attr_accessible :name, :description, :bpm, :time_signature, :status, :recording_type,
|
||||
# :original_artist, :songwriter, :publisher, :licensor, :licensor_id, :pro, :genres_jam_tracks_attributes, :sales_region, :price,
|
||||
# :reproduction_royalty, :public_performance_royalty, :reproduction_royalty_amount,
|
||||
# :licensor_royalty_amount, :pro_royalty_amount, :plan_code, :initial_play_silence, :jam_track_tracks_attributes,
|
||||
# :jam_track_tap_ins_attributes, :genre_ids, :version, :jmep_json, :jmep_text, :pro_ascap, :pro_bmi, :pro_sesac, :duration,
|
||||
# :server_fixation_date, :hfa_license_status, :hfa_license_desired, :alternative_license_status, :hfa_license_number, :hfa_song_code, :album_title, :year, :allow_free, as: :admin
|
||||
|
||||
validates :name, presence: true, length: {maximum: 200}
|
||||
validates :plan_code, presence: true, uniqueness: true, length: {maximum: 50 }
|
||||
validates :description, length: {maximum: 1000}
|
||||
validates :time_signature, inclusion: {in: [nil] + [''] + TIME_SIGNATURES} # the empty string is needed because of activeadmin
|
||||
validates :status, inclusion: {in: [nil] + STATUS}
|
||||
validates :recording_type, inclusion: {in: [nil] + RECORDING_TYPE}
|
||||
validates :original_artist, length: {maximum: 200}
|
||||
validates :songwriter, length: {maximum: 1000}
|
||||
validates :publisher, length: {maximum: 1000}
|
||||
validates :sales_region, inclusion: {in: [nil] + SALES_REGION}
|
||||
validates_format_of :price, with: /\A\d+\.*\d{0,2}\z/
|
||||
validates :version, presence: true
|
||||
validates :pro_ascap, inclusion: {in: [true, false]}
|
||||
validates :pro_bmi, inclusion: {in: [true, false]}
|
||||
validates :pro_sesac, inclusion: {in: [true, false]}
|
||||
validates :public_performance_royalty, inclusion: {in: [nil, true, false]}
|
||||
validates :reproduction_royalty, inclusion: {in: [nil, true, false]}
|
||||
validates :public_performance_royalty, inclusion: {in: [nil, true, false]}
|
||||
validates :duration, numericality: {only_integer: true}, :allow_nil => true
|
||||
validates :hfa_license_status, inclusion: {in: [true, false]}
|
||||
validates :hfa_license_desired, inclusion: {in: [true, false]}
|
||||
validates :alternative_license_status, inclusion: {in: [true, false]}
|
||||
validates :hfa_license_number, numericality: {only_integer: true}, :allow_nil => true
|
||||
validates :hfa_song_code, length: {maximum: 200}
|
||||
validates :album_title, length: {maximum: 200}
|
||||
validates :slug, uniqueness: true
|
||||
|
||||
validates_format_of :reproduction_royalty_amount, with: /\A\d+\.*\d{0,4}\z/, :allow_blank => true
|
||||
validates_format_of :licensor_royalty_amount, with: /\A\d+\.*\d{0,4}\z/, :allow_blank => true
|
||||
|
||||
belongs_to :licensor , class_name: 'JamRuby::JamTrackLicensor', foreign_key: 'licensor_id', :inverse_of => :jam_tracks
|
||||
|
||||
has_many :genres_jam_tracks, :class_name => "JamRuby::GenreJamTrack", :foreign_key => "jam_track_id", inverse_of: :jam_track
|
||||
has_many :genres, :through => :genres_jam_tracks, :class_name => "JamRuby::Genre", :source => :genre
|
||||
|
||||
has_many :jam_track_tracks, -> { order('track_type ASC, position ASC, part ASC, instrument_id ASC' )},:class_name => "JamRuby::JamTrackTrack"
|
||||
has_many :jam_track_tap_ins, -> { order('offset_time ASC')}, :class_name => "JamRuby::JamTrackTapIn"
|
||||
has_many :jam_track_files, :class_name => "JamRuby::JamTrackFile"
|
||||
|
||||
has_many :jam_track_rights, :class_name => "JamRuby::JamTrackRight" #, inverse_of: 'jam_track', :foreign_key => "jam_track_id" # '
|
||||
|
||||
has_many :owners, :through => :jam_track_rights, :class_name => "JamRuby::User", :source => :user
|
||||
|
||||
has_many :playing_sessions, :class_name => "JamRuby::ActiveMusicSession", :dependent => :destroy
|
||||
|
||||
has_many :recordings, :class_name => "JamRuby::Recording", :dependent => :destroy
|
||||
|
||||
# VRFS-2916 jam_tracks.id is varchar: REMOVE
|
||||
# has_many :plays, :class_name => "JamRuby::PlayablePlay", :foreign_key => :jam_track_id, :dependent => :destroy
|
||||
# VRFS-2916 jam_tracks.id is varchar: ADD
|
||||
has_many :plays, :class_name => "JamRuby::PlayablePlay", :as => :playable, :dependent => :destroy
|
||||
|
||||
has_many :jam_track_session, :class_name => "JamRuby::JamTrackSession"
|
||||
|
||||
# when we know what JamTrack this refund is related to, these are associated
|
||||
belongs_to :recurly_transactions, class_name: 'JamRuby::RecurlyTransactionWebHook'
|
||||
|
||||
accepts_nested_attributes_for :jam_track_tracks, allow_destroy: true
|
||||
accepts_nested_attributes_for :jam_track_tap_ins, allow_destroy: true
|
||||
|
||||
|
||||
# we can make sure a few things stay in sync here.
|
||||
# 1) the reproduction_royalty_amount has to stay in sync based on duration
|
||||
# 2) the onboarding_exceptions JSON column
|
||||
after_save :sync_reproduction_royalty
|
||||
after_save :sync_onboarding_exceptions
|
||||
|
||||
# NEW TO LAMBDA
# S3 prefix under which this jam track's audio files are stored.
def store_dir
  "jam_track_tracks"
end
|
||||
|
||||
# NEW TO LAMBDA
# " - <licensor name>" suffix when a licensor is set, otherwise ''.
# Raises when a licensor exists but has no name.
def licensor_suffix
  result = ''
  if licensor
    raise "no licensor name" if licensor.name.nil?
    result = " - #{licensor.name}"
  end
  result
end

# NEW TO LAMBDA
# Lazily derives (and memoizes into the column) the S3 directory for this
# track's audio: "<store_dir>/<artist>/<name><licensor suffix>".
def generate_s3_host_dir()
  if s3_audio_dir.nil?
    raise "original_artist is nil" if original_artist.nil?
    raise "name is nil" if name.nil?
    raise "licensor_suffix is nil" if licensor_suffix.nil?
    self.s3_audio_dir = "#{store_dir}/#{original_artist}/#{name}#{licensor_suffix}"
  end
  s3_audio_dir
end
|
||||
|
||||
# Bumps the integer version by one and persists immediately
# (raises on failed validation).
def increment_version!
  self.version = 1 + version.to_i
  save!
end
|
||||
|
||||
# after_save hook: keeps reproduction_royalty_amount in sync with duration,
# per the statutory mechanical royalty rate for permanent digital downloads:
#   9.10c per copy for songs 5 minutes or less, or
#   1.75c per minute or fraction thereof, per copy, for songs over 5 minutes,
# so 5:01-6:00 is 10.5c, 6:01-7:00 is 12.25c, etc.
#
# Fixed: the original computed 0.091 + 0.0175 * extra_minutes, which yields
# 10.85c for a 5:01-6:00 track instead of the documented 10.5c.
#
# Uses update_column so the write does not re-trigger save callbacks.
# Always returns true.
def sync_reproduction_royalty
  royalty = nil
  if duration
    # Whole-or-partial minutes, e.g. 300s -> 5, 301s -> 6.
    ceil_minutes = ((duration - 1) / 60) + 1
    royalty = [(0.0175 * ceil_minutes).round(5), 0.091].max
  end
  update_column(:reproduction_royalty_amount, royalty)

  true
end
|
||||
|
||||
# after_save hook: recomputes the onboarding_exceptions column — a map of
# everything still blocking this track from being fully onboarded, or nil when
# nothing is missing. Writes via update_column so callbacks do not re-fire.
# Always returns true.
def sync_onboarding_exceptions
  exceptions = {}
  exceptions[:no_duration]         = true if duration.nil?
  exceptions[:no_genres]           = true if genres.count == 0
  exceptions[:no_year]             = true if year.nil?
  exceptions[:no_licensor]         = true if licensor.nil?
  exceptions[:unknown_instrument]  = true if missing_instrument_info?
  exceptions[:no_master]           = true if master_track.nil?
  exceptions[:missing_previews]    = true if missing_previews?
  exceptions[:duplicate_positions] = true if duplicate_positions?

  update_column(:onboarding_exceptions, exceptions.empty? ? nil : exceptions)

  true
end
|
||||
|
||||
# Human-readable purchase line-item description for this track and variant.
def sale_display(variant = nil)
  variant_desc =
    case variant
    when ShoppingCart::JAMTRACK_FULL then 'FULL'
    when ShoppingCart::JAMTRACK_DOWNLOAD then 'UPGRADE' # fixed typo 'UPRGADE'
    when ShoppingCart::JAMTRACK_STREAM then 'FOR USE ONLY WITHIN APP'
    else 'UNKNOWN'
    end

  "JamTrack: #{name} - #{variant_desc}"
end
|
||||
|
||||
|
||||
# True when two (or more) tracks of this jam track share a position value.
def duplicate_positions?
  occurrences = Hash.new(0)
  jam_track_tracks.each { |track| occurrences[track.position] += 1 }
  occurrences.values.any? { |n| n > 1 }
end
|
||||
|
||||
# True when any stem is the 'other' instrument with no meaningful part label
# (nil or a placeholder starting with 'Other').
def missing_instrument_info?
  jam_track_tracks.any? do |track|
    track.instrument_id == 'other' && (track.part.nil? || track.part.start_with?('Other'))
  end
end

# True when any stem lacks a generated preview.
def missing_previews?
  jam_track_tracks.any? { |track| !track.has_preview? }
end
|
||||
|
||||
# Comma-separated list of outstanding onboarding problems, for admin display.
def onboard_warnings
  checks = {
    'POSITIONS' => duplicate_positions?,
    'PREVIEWS'  => missing_previews?,
    'DURATION'  => duration.nil?,
    'JMEP'      => jmep_json.blank?
  }
  checks.select { |_, failed| failed }.keys.join(',')
end

# How many jam tracks exist for this track's artist (this one included).
def band_jam_track_count
  JamTrack.where(original_artist: original_artist).count
end
|
||||
|
||||
class << self
|
||||
# @return array[artist_name(string)] — every distinct artist, alphabetized.
def all_artists
  JamTrack.select("original_artist")
          .group("original_artist")
          .order('original_artist')
          .map(&:original_artist)
end

# @return array[JamTrack] for given artist_name
def tracks_for_artist(artist_name)
  JamTrack.where("original_artist=?", artist_name).all
end
|
||||
|
||||
# special case of index: typeahead search returning matching artists and songs.
#
# options[:match] is the user's partial query; :limit defaults to 5;
# :show_purchased_only passes through to the underlying queries.
# Returns {artists: [...], songs: [...]} — both empty when :match is blank.
def autocomplete(options, user)
  return {artists: [], songs: []} if options[:match].blank?

  # (Removed a no-op self-assignment of options[:show_purchased_only] that
  # was in the original; the flag passes through inside options untouched.)
  options[:limit] = options[:limit] || 5

  options[:artist_search] = options[:match]
  artists, _pager = artist_index(options, user)

  options.delete(:artist_search)
  options[:song_search] = options[:match]
  options[:sort_by] = 'jamtrack'
  songs, _pager = index(options, user)

  {artists: artists, songs: songs}
end
|
||||
|
||||
# Lightweight rows for a user's purchased-tracks listing (id, name, artist,
# year, and when the right was granted), ordered by artist then name.
def purchase_stubs(user)
  fields = ['jam_tracks.id', :name, :original_artist, :year, 'jam_track_rights.created_at AS purchased_at']
  JamTrack.select(fields)
          .joins(:jam_track_rights)
          .where("jam_track_rights.user_id = ?", user.id)
          .includes(:genres)
          .order([:original_artist, :name])
end
|
||||
|
||||
# Paginated JamTrack catalog search.
#
# options:
#   :page/:per_page OR :start/:limit — two pagination styles (limit defaults 20)
#   :search, :artist_search, :song_search — Postgres full-text filters
#   :artist, :song, :id — exact-match filters
#   :genre, :instrument, :availability — facet filters
#   :group_artist — collapse to one aggregated row per artist
#   :sort_by — 'jamtrack' sorts by name, otherwise artist then name
#   :show_purchased_only — restrict to tracks the user holds rights on
# user: used for the purchased-only join; non-admins outside development only
#   see Production tracks.
#
# Returns [relation, next_start_or_nil, total_count].
#
# Fixed: the :instrument and :availability filters interpolated caller input
# directly into SQL (injection risk); they are now parameterized.
def index(options, user)
  # Resolve pagination from either style.
  if options[:page]
    page = options[:page].to_i
    per_page = options[:per_page].to_i
    if per_page == 0
      # per_page absent — fall back to :limit (default 20)
      limit = (options[:limit] || 20).to_i
      per_page = limit
    else
      limit = per_page
    end
    start = (page - 1) * per_page
  else
    limit = (options[:limit] || 20).to_i
    start = options[:start].presence.to_i
    page = 1 + start / limit
    per_page = limit
  end

  query = JamTrack.joins(:jam_track_tracks)
                  .paginate(page: page, per_page: per_page)

  if options[:show_purchased_only]
    query = query.joins(:jam_track_rights)
                 .where("jam_track_rights.user_id = ?", user.id)
  end

  # Full-text filters, each against its own tsvector column.
  { search: 'search_tsv', artist_search: 'artist_tsv', song_search: 'name_tsv' }.each do |opt, column|
    next unless options[opt]
    tsquery = Search.create_tsquery(options[opt])
    query = query.where("(#{column} @@ to_tsquery('jamenglish', ?))", tsquery) if tsquery
  end

  if options[:artist].present?
    if options[:artist] == 'Stockton Helbing'
      # todo: add licensor option — this artist is keyed off the licensor record
      licensor = JamTrackLicensor.find_by_name('Stockton Helbing')
      query = query.where(licensor_id: licensor.id) if licensor
    else
      query = query.where("original_artist=?", options[:artist])
    end
  end

  query = query.where("name=?", options[:song]) if options[:song].present?
  query = query.where("jam_tracks.id=?", options[:id]) if options[:id].present?

  if options[:group_artist]
    query = query.select("original_artist, array_agg(jam_tracks.id) AS id, MIN(name) AS name, MIN(description) AS description, MIN(recording_type) AS recording_type, MIN(original_artist) AS original_artist, MIN(songwriter) AS songwriter, MIN(publisher) AS publisher, MIN(sales_region) AS sales_region, MIN(price) AS price, MIN(version) AS version")
                 .group("original_artist")
                 .order('jam_tracks.original_artist')
                 .includes([{ jam_track_tracks: :instrument }, { genres_jam_tracks: :genre }])
  else
    query = query.group("jam_tracks.id")
    query = if options[:sort_by] == 'jamtrack'
              query.order('jam_tracks.name')
            else
              query.order('jam_tracks.original_artist, jam_tracks.name')
            end
  end

  # Non-admins outside development only see Production tracks.
  unless user.try(:admin) || Rails.env == 'development'
    query = query.where("jam_tracks.status = ?", 'Production')
  end

  unless options[:genre].blank?
    query = query.joins(:genres).where('genre_id = ? ', options[:genre])
  end

  # Parameterized (the original interpolated these values into the SQL string).
  unless options[:instrument].blank?
    query = query.where("jam_track_tracks.instrument_id = ? and jam_track_tracks.track_type = 'Track'", options[:instrument])
  end
  unless options[:availability].blank?
    query = query.where("jam_tracks.sales_region = ?", options[:availability])
  end

  # FIXME: n+1 queries for rights and genres

  count = query.total_entries
  if count == 0 || query.length < limit
    [query, nil, count]
  else
    [query, start + limit, count]
  end
end
|
||||
|
||||
|
||||
# provides artist names and how many jamtracks are available for each.
#
# Same option handling as index (pagination default limit 100, plus
# :artist_search, :genre, :instrument, :availability, :show_purchased_only).
# Returns [relation, next_start_or_nil].
#
# Fixed: (1) in the per_page==0 branch the original re-assigned
# `limit = per_page` after computing limit, clobbering it back to 0;
# (2) the :instrument/:availability filters interpolated caller input into SQL.
def artist_index(options, user)
  if options[:page]
    page = options[:page].to_i
    per_page = options[:per_page].to_i
    if per_page == 0
      limit = (options[:limit] || 100).to_i
      per_page = limit
    else
      limit = per_page
    end
    start = (page - 1) * per_page
  else
    limit = (options[:limit] || 100).to_i
    start = options[:start].presence.to_i
    page = 1 + start / limit
    per_page = limit
  end

  query = JamTrack.paginate(page: page, per_page: per_page)
                  .select("original_artist, count(original_artist) AS song_count")
                  .group("original_artist")
                  .order('jam_tracks.original_artist')

  query = query.where("jam_tracks.status = ?", 'Production') unless user.admin

  if options[:show_purchased_only]
    query = query.joins(:jam_track_rights)
                 .where("jam_track_rights.user_id = ?", user.id)
  end

  if options[:artist_search]
    tsquery = Search.create_tsquery(options[:artist_search])
    query = query.where("(artist_tsv @@ to_tsquery('jamenglish', ?))", tsquery) if tsquery
  end

  unless options[:genre].blank?
    query = query.joins(:genres).where('genre_id = ? ', options[:genre])
  end

  # Parameterized (the original interpolated these values into the SQL string).
  # NOTE(review): the instrument filter references jam_track_tracks without a
  # join on that table here — confirm callers never pass :instrument, or add
  # the join.
  unless options[:instrument].blank?
    query = query.where("jam_track_tracks.instrument_id = ?", options[:instrument])
  end
  unless options[:availability].blank?
    query = query.where("jam_tracks.sales_region = ?", options[:availability])
  end

  size = query.length
  if size == 0 || size < limit
    [query, nil]
  else
    [query, start + limit]
  end
end
|
||||
end
|
||||
|
||||
# The ClickWav JamTrackFile for this jam track, if any.
def click_track_file
  JamTrackFile.where(jam_track_id: id, file_type: 'ClickWav').first
end

# The Click JamTrackTrack for this jam track, if any.
def click_track
  JamTrackTrack.where(jam_track_id: id, track_type: 'Click').first
end
|
||||
|
||||
# True when the JMEP event list contains a metronome event with a truthy
# value — i.e. the track has a count-in. False when jmep_json is absent,
# has no "Events", or the first metronome entry's value is nil/false.
def has_count_in?
  return false unless jmep_json

  events = jmep_json["Events"]
  return false unless events

  metronome_event = events.find { |event| event.has_key?("metronome") }
  !!(metronome_event && metronome_event["metronome"])
end
|
||||
|
||||
# The single Master mix track for this jam track, if any.
def master_track
  JamTrackTrack.where(jam_track_id: id, track_type: 'Master').first
end

# All stems: the individual instrument Tracks plus the Click.
def stem_tracks
  JamTrackTrack.where(jam_track_id: id).where("track_type = 'Track' or track_type = 'Click'")
end

# A user may download this jam track only if they hold rights to it.
def can_download?(user)
  owners.include?(user)
end
|
||||
|
||||
# The JamTrackRight this user holds for this track, optionally narrowed by
# purchase variant; nil when no matching right exists. Raises on an unknown
# variant.
def right_for_user(user, variant = nil)
  query = jam_track_rights.where("user_id=?", user)

  if variant
    case variant
    when ShoppingCart::JAMTRACK_DOWNLOAD, ShoppingCart::JAMTRACK_FULL
      # Fixed: the original called where('can_download', true) — not a valid
      # condition form, it raised at runtime instead of filtering.
      query = query.where(can_download: true)
    when ShoppingCart::JAMTRACK_STREAM
      # streaming rights need no extra condition
    else
      # Fixed: was `throw`, which is for catch/throw control flow, not errors.
      raise 'unknown variant ' + variant.to_s
    end
  end

  query.first
end
|
||||
|
||||
|
||||
# All mixdowns the given user has created for this jam track.
def mixdowns_for_user(user)
  JamTrackMixdown.where(user_id: user.id, jam_track_id: id)
end

# Price to upgrade an already-purchased track to downloadable.
def upgrade_price
  variant_price('download')
end

# Price for a purchase variant: 'full' is the download price, 'download' is
# the upgrade delta (download price minus base), anything else is the base price.
def variant_price(variant)
  case variant
  when 'full' then download_price
  when 'download' then download_price - price
  else price
  end
end
|
||||
|
||||
# plan_code with the leading 'jamtrack-' prefix removed.
# (Like the original, this slices unconditionally by prefix length.)
def short_plan_code
  prefix_length = 'jamtrack-'.length
  plan_code[prefix_length..-1]
end
|
||||
|
||||
# http://stackoverflow.com/questions/4308377/ruby-post-title-to-slug
# Lowercases, trims, hyphenates spaces, and strips non-word characters.
def sluggarize(field)
  field.downcase.strip.tr(' ', '-').gsub(/[^\w-]/, '')
end
|
||||
|
||||
# Builds the slug as "<artist>-<name>", appending "-<licensor slug>" when a
# licensor with a slug is present. Mutates self.slug; does not save.
def generate_slug
  self.slug = [sluggarize(original_artist), sluggarize(name)].join('-')
  self.slug << "-" + licensor.slug if licensor && licensor.slug.present?
end
|
||||
|
||||
# Derives plan_code as "jamtrack-<artist[0..19]>-<name>" using only
# alphanumerics, appending the licensor slug when present, truncated to the
# 50-character column limit. Mutates self.plan_code; does not save.
def gen_plan_code
  # remove all non-alphanumeric chars from artist as well as name
  artist_code = original_artist.gsub(/[^0-9a-z]/i, '').downcase
  name_code = name.gsub(/[^0-9a-z]/i, '').downcase
  self.plan_code = "jamtrack-#{artist_code[0...20]}-#{name_code}"

  # (Removed an unreachable "no slug on licensor" raise: the condition below
  # already guarantees the slug is non-nil.)
  if licensor && licensor.slug
    self.plan_code << "-" + licensor.slug
  end

  self.plan_code = self.plan_code[0...50] # make sure it's a max of 50 long
end
|
||||
|
||||
# "Name (Artist)" display form.
def to_s
  "#{name} (#{original_artist})"
end

# Epoch seconds of the user's most recent JamTrack purchase, or 0 when the
# user has never purchased anything (nil.to_i == 0).
def self.latestPurchase(user_id)
  latest = JamTrackRight.select('created_at')
                        .where(user_id: user_id)
                        .order('created_at DESC')
                        .limit(1)
                        .first
  latest.try(:created_at).to_i
end
|
||||
|
||||
# NOTE(review): presumably set when preview generation fails — confirm with
# the preview pipeline callers.
attr_accessor :preview_generate_error

# jmep_json is regenerated from jmep_text on every save; parse problems are
# surfaced as validation errors via jmep_text_validate.
before_save :jmep_json_generate
validate :jmep_text_validate

# Validation hook: runs the JMEP text through JmepManager and reports any
# ArgumentError it raises as a validation error on :jmep_text.
def jmep_text_validate
begin
JmepManager.execute(self.jmep_text)
rescue ArgumentError => err
errors.add(:jmep_text, err.to_s)
end
end
|
||||
|
||||
# before_save hook: normalizes empty-string fields to nil (ActiveAdmin submits
# '' for blanks) and regenerates jmep_json from jmep_text. Parse errors are
# deliberately swallowed here — jmep_text_validate reports them as validation
# errors instead.
def jmep_json_generate
  self.licensor_id = nil if licensor_id == ''
  self.jmep_json = nil if jmep_json == ''
  self.time_signature = nil if time_signature == ''

  begin
    json_str = JmepManager.execute(jmep_text)
    self.jmep_json = json_str.nil? ? nil : JSON.parse(json_str)
  rescue ArgumentError
    # swallowed on purpose; see jmep_text_validate
  end
end
|
||||
|
||||
# used in mobile simulate purchase: returns one JamTrack the given user does
# not yet own, or nil when they own everything.
# Fixed: user.id was interpolated into the SQL string; it is now bound.
def self.forsale(user)
  sql = <<SQL
SELECT jt.* FROM jam_tracks jt
WHERE jt.id NOT IN (
SELECT jt.id
FROM jam_tracks jt
JOIN jam_track_rights AS jtr ON jtr.jam_track_id = jt.id
WHERE jtr.user_id = ?
)
LIMIT 1
SQL
  find_by_sql([sql, user.id]).first
end
|
||||
|
||||
# Description of this track's first genre, or nil when no genres are set.
def genre_name
  genres.first.try(:description)
end
|
||||
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
module JamRuby

# Holds an auxiliary file for a JamTrack: a click track (wav or txt) or a
# precount file.
class JamTrackFile < ActiveRecord::Base
include JamRuby::S3ManagerMixin

# Valid values for the file_type column.
FILE_TYPE = %w{ClickWav ClickTxt Precount}

@@log = Logging.logger[JamTrackFile]

#attr_accessible :jam_track_id, :file_type, :filename, as: :admin
#attr_accessible :url, :md5, :length, as: :admin

attr_accessor :original_audio_s3_path, :skip_uploader, :preview_generate_error

# Fixed: this callback was registered twice in the original.
before_destroy :delete_s3_files

validates :file_type, inclusion: {in: FILE_TYPE }

belongs_to :jam_track, class_name: "JamRuby::JamTrack"

# S3 prefix under which all jam-track auxiliary files live.
def store_dir
  "jam_track_files"
end

# " - <licensor name>" suffix when the parent track has a licensor, else ''.
def licensor_suffix
  suffix = ''
  if jam_track.licensor
    raise "no licensor name" if jam_track.licensor.name.nil?
    suffix = " - #{jam_track.licensor.name}"
  end
  suffix
end

# Full S3 key for this file, namespaced by artist/track/licensor.
def filename(original_name)
  "#{store_dir}/#{jam_track.original_artist}/#{jam_track.name}#{licensor_suffix}/#{original_name}"
end

# Canonical key for a manually uploaded file of this file_type.
def manually_uploaded_filename
  if click_wav?
    filename('click.wav')
  elsif click_txt?
    filename('click.txt')
  elsif precount?
    filename('precount.wav')
  else
    raise 'unknown file type: ' + file_type
  end
end

# Fixed: these predicates compared track_type — a column this model does not
# have (the model validates file_type, see above) — so they raised/failed.
def click_wav?
  file_type == 'ClickWav'
end

def click_txt?
  file_type == 'ClickTxt'
end

def precount?
  file_type == 'Precount'
end

# creates a short-lived URL that has access to the object.
# the idea is that this is used when a user who has the rights to this tries to download this JamTrack
# we would verify their rights (can_download?), and generates a URL in response to the click so that they can download
# but the url is short lived enough so that it wouldn't be easily shared
def sign_url(expiration_time = 120)
  # Fixed: the original read self[url] — indexing by the url *value* rather
  # than the :url attribute — which always yielded nil.
  s3_manager.sign_url(self[:url], {:expires => expiration_time, :response_content_type => 'audio/wav', :secure => true})
end

def can_download?(user)
  # I think we have to make a special case for 'previews', but maybe that's just up to the controller to not check can_download?
  jam_track.owners.include?(user)
end

# Removes the backing S3 object, if present, when the record is destroyed.
def delete_s3_files
  s3_manager.delete(self[:url]) if self[:url] && s3_manager.exists?(self[:url])
end
end
end
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
module JamRuby
# Licensing entity that grants rights to distribute jam tracks.
class JamTrackLicensor < ActiveRecord::Base

# Fixed: the original wrote `table_name = '...'`, which only assigned a local
# variable and never configured the model.
self.table_name = 'jam_track_licensors'

#attr_accessible :name, :description, :attention, :address_line_1, :address_line_2,
# :city, :state, :zip_code, :contact, :email, :phone, :slug, as: :admin

validates :name, presence: true, uniqueness: true, length: {maximum: 200}
validates :description, length: {maximum: 1000}
validates :attention, length: {maximum: 200}
validates :address_line_1, length: {maximum: 200}
validates :address_line_2, length: {maximum: 200}
validates :city, length: {maximum: 200}
validates :state, length: {maximum: 200}
validates :zip_code, length: {maximum: 200}
validates :contact, length: {maximum: 200}
validates :email, length: {maximum: 200}
validates :phone, length: {maximum: 200}

has_many :jam_tracks, :class_name => "JamRuby::JamTrack", foreign_key: 'licensor_id', :inverse_of => :licensor
end
end
|
||||
|
|
@ -0,0 +1,314 @@
|
|||
module JamRuby
|
||||
|
||||
# describes an audio track (like the drums, or guitar) that comprises a JamTrack
|
||||
class JamTrackTrack < ActiveRecord::Base
|
||||
include JamRuby::S3ManagerMixin
|
||||
include JamRuby::S3PublicManagerMixin
|
||||
|
||||
# there should only be one Master per JamTrack, but there can be N Track per JamTrack
|
||||
TRACK_TYPE = %w{Track Master Click}
|
||||
|
||||
@@log = Logging.logger[JamTrackTrack]
|
||||
|
||||
before_destroy :delete_s3_files
|
||||
|
||||
# Because JamTrackImporter imports audio files now, and because also the mere presence of this causes serious issues when updating the model (because reset of url_44 to something bogus), I've removed these
|
||||
#mount_uploader :url_48, JamTrackTrackUploader
|
||||
#mount_uploader :url_44, JamTrackTrackUploader
|
||||
|
||||
#attr_accessible :jam_track_id, :track_type, :instrument, :instrument_id, :position, :part, as: :admin
|
||||
#attr_accessible :url_44, :url_48, :md5_44, :md5_48, :length_44, :length_48, :preview_start_time_raw, as: :admin
|
||||
|
||||
attr_accessor :original_audio_s3_path, :skip_uploader, :preview_generate_error, :wav_file, :tmp_duration, :skip_inst_part_uniq
|
||||
|
||||
before_destroy :delete_s3_files
|
||||
|
||||
validates :position, presence: true, numericality: {only_integer: true}, length: {in: 1..1000}
|
||||
validates :part, length: {maximum: 35}
|
||||
validates :track_type, inclusion: {in: TRACK_TYPE }
|
||||
validates :preview_start_time, numericality: {only_integer: true}, length: {in: 1..1000}, :allow_nil => true
|
||||
validates_uniqueness_of :part, scope: [:jam_track_id, :instrument_id], unless: :skip_inst_part_uniq
|
||||
# validates :jam_track, presence: true
|
||||
|
||||
belongs_to :instrument, class_name: "JamRuby::Instrument"
|
||||
belongs_to :jam_track, class_name: "JamRuby::JamTrack"
|
||||
|
||||
has_many :recorded_jam_track_tracks, :class_name => "JamRuby::RecordedJamTrackTrack", :foreign_key => :jam_track_track_id, :dependent => :destroy
|
||||
has_one :jam_track_right, class_name: 'JamRuby::JamTrackRight', foreign_key: 'last_stem_id', inverse_of: :last_stem
|
||||
|
||||
# S3 prefix under which all stem audio lives.
def store_dir
  "jam_track_tracks"
end

# " - <licensor name>" suffix when the parent jam track has a licensor, else ''.
# Raises when a licensor exists but has no name.
def licensor_suffix
  result = ''
  if jam_track.licensor
    raise "no licensor name" if jam_track.licensor.name.nil?
    result = " - #{jam_track.licensor.name}"
  end
  result
end
|
||||
|
||||
# NEW TO LAMBDA
# Full S3 key for a file belonging to this stem's jam track.
def filename(original_name)
  "#{jam_track.s3_audio_dir}/#{original_name}"
end

# Key for this stem's preview file. The md5 is embedded because previews are
# cached forever.
def preview_filename(md5, ext='ogg')
  base = File.basename(self["url_44"], ".ogg")
  "#{preview_directory}/#{base}-preview-#{md5}.#{ext}"
end

# Directory for public preview files, namespaced by artist/track/licensor.
def preview_directory
  "jam_track_previews/#{jam_track.original_artist}/#{jam_track.name}#{licensor_suffix}"
end
|
||||
|
||||
# A stem counts as having a preview only when both the ogg and mp3 preview
# URLs are set.
def has_preview?
  !(self["preview_url"].nil? || self['preview_mp3_url'].nil?)
end
|
||||
|
||||
# generates a URL that points to a public version of the preview in the
# requested encoding ('ogg', 'mp3' or 'aac'); nil when that preview is
# missing. Raises on an unknown media_type.
def preview_public_url(media_type='ogg')
  column = {
    'ogg' => :preview_url,
    'mp3' => :preview_mp3_url,
    'aac' => :preview_aac_url
  }[media_type]
  raise "unknown media_type #{media_type}" unless column

  url = self[column]
  url && s3_public_manager.public_url(url, { :secure => true })
end
|
||||
|
||||
# Name shown in track listings: 'Master Mix' for the master row, otherwise
# the instrument description plus an optional "-(part)" qualifier.
def display_name
  return 'Master Mix' if track_type == 'Master'

  part_suffix = part ? "-(#{part})" : ''
  "#{instrument.description}#{part_suffix}"
end
|
||||
|
||||
# Canonical S3 key for a manually uploaded stem; distinguishes the master mix
# and embeds the sample rate implied by the mounted column (:url_48 / :url_44).
def manually_uploaded_filename(mounted_as)
  rate = mounted_as == :url_48 ? '48000' : '44100'
  if track_type == 'Master'
    filename("Master Mix-#{rate}.ogg")
  else
    filename("#{jam_track.name} Stem - #{instrument.description}-#{part}-#{rate}.ogg")
  end
end

# True for the single master-mix row of a jam track.
def master?
  track_type == 'Master'
end
|
||||
|
||||
# The stored URL for the requested sample rate: url_48 for 48 (the default),
# url_44 otherwise.
def url_by_sample_rate(sample_rate=48)
  self[sample_rate == 48 ? "url_48" : "url_44"]
end

# creates a short-lived URL that has access to the object.
# the idea is that this is used when a user who has the rights to this tries to download this JamTrack
# we would verify their rights (can_download?), and generates a URL in response to the click so that they can download
# but the url is short lived enough so that it wouldn't be easily shared
def sign_url(expiration_time = 120, sample_rate=48)
  options = {:expires => expiration_time, :response_content_type => 'audio/ogg', :secure => true}
  s3_manager.sign_url(url_by_sample_rate(sample_rate), options)
end
|
||||
|
||||
# Short-lived signed URL for a web download of the given rendition type.
# Optional content_type / response_content_disposition are passed through
# to the signer only when provided.
def web_download_sign_url(expiration_time = 120, type='mp3', content_type = nil, response_content_disposition = nil)
  options = { :expires => expiration_time, :secure => true }
  options[:response_content_type] = content_type if content_type
  options[:response_content_disposition] = response_content_disposition if response_content_disposition

  # ogg has a different column format in the database ('url_48');
  # every other type uses the 'url_<type>_48' naming scheme
  url_field = type == 'ogg' ? self['url_48'] : self['url_' + type + '_48']

  s3_manager.sign_url(url_field, options)
end
|
||||
|
||||
# A user may download this track only when they are among the parent
# jam track's owners.
# NOTE(review): 'previews' may need a special case, or it may be up to the
# controller to simply not call can_download? for previews — confirm.
def can_download?(user)
  owners = jam_track.owners
  owners.include?(user)
end
|
||||
|
||||
# Swaps this track's position with the sibling directly above it.
# No-op when already first, or when no sibling occupies position-1.
def move_up
  #normalize_position
  return unless self.position > 1

  above = self.jam_track.jam_track_tracks.where("position=?", self.position - 1).first
  return unless above

  # swap inside a transaction so a failed second save can't leave
  # two tracks sharing one position
  JamTrack.transaction do
    above.position, self.position = self.position, above.position
    above.save(validate: false)
    self.save(validate: false)
  end
end
|
||||
|
||||
# Swaps this track's position with the sibling directly below it, after
# normalize_position has re-numbered siblings to the contiguous range
# 1..count. No-op when already last, or when no sibling occupies position+1.
def move_down
  count = normalize_position
  return unless self.position < count

  # Switch with next:
  next_track = self.jam_track.jam_track_tracks.where("position=?", self.position + 1).first
  return unless next_track

  # FIX: wrap the swap in a transaction, as move_up already does — without it
  # a failure of the second save leaves two tracks with the same position.
  JamTrack.transaction do
    next_track.position, self.position = self.position, next_track.position
    next_track.save(validate: false)
    self.save(validate: false)
  end
end
|
||||
|
||||
# Deletes this track's stored S3 objects: the private 44.1k/48k renditions
# and the public ogg/mp3 previews. Each key is only deleted when the column
# is set and the object actually exists.
# NOTE(review): preview_aac_url is not cleaned up here — confirm intentional.
def delete_s3_files
  [:url_44, :url_48].each do |column|
    key = self[column]
    s3_manager.delete(key) if key && s3_manager.exists?(key)
  end
  [:preview_url, :preview_mp3_url].each do |column|
    key = self[column]
    s3_public_manager.delete(key) if key && s3_public_manager.exists?(key)
  end
end
|
||||
|
||||
|
||||
|
||||
# Downloads the 44.1kHz source ogg into a temp dir and builds the preview
# renditions from it via process_preview. Any failure is captured into
# @preview_generate_error rather than propagated.
def generate_preview
  Dir.mktmpdir do |tmp_dir|
    local_source = File.join(tmp_dir, 'in.ogg')
    raise 'no track' unless self["url_44"]
    s3_manager.download(self.url_by_sample_rate(44), local_source)
    process_preview(local_source, tmp_dir)
  end
rescue Exception => e
  # NOTE(review): rescuing Exception (not StandardError) is very broad — confirm intentional
  @@log.error("error in sox command #{e.to_s}")
  @preview_generate_error = e.to_s
end
|
||||
|
||||
# input is the original ogg file for the track. tmp_dir is where this code can
# safely generate output stuff and have it cleaned up later.
#
# Trims a 20-second clip from the source with sox, transcodes it to mp3 and
# aac with ffmpeg, uploads all three renditions to the public bucket, records
# their URLs/md5s/lengths on this record, saves, then best-effort deletes any
# previously uploaded previews. On a failed external command,
# @preview_generate_error is set and the method returns early.
def process_preview(input, tmp_dir)
  uuid = SecureRandom.uuid
  output = File.join(tmp_dir, "#{uuid}.ogg")
  output_mp3 = File.join(tmp_dir, "#{uuid}.mp3")
  output_aac = File.join(tmp_dir, "#{uuid}.aac")

  # preview_start_time is stored in milliseconds; the clip runs 20 seconds
  start = self.preview_start_time.to_f / 1000
  stop = start + 20

  command = "sox \"#{input}\" \"#{output}\" trim #{sprintf("%.3f", start)} =#{sprintf("%.3f", stop)}"
  @@log.debug("trimming using: " + command)
  sox_output = `#{command}`
  result_code = $?.to_i
  if result_code != 0
    @@log.debug("fail #{result_code}")
    @preview_generate_error = "unable to execute cut command #{sox_output}"
    return
  end

  # now create mp3 off of ogg preview
  convert_mp3_cmd = "#{APP_CONFIG.ffmpeg_path_mp3} -i \"#{output}\" -ab 192k \"#{output_mp3}\""
  @@log.debug("converting to mp3 using: " + convert_mp3_cmd)
  convert_output = `#{convert_mp3_cmd}`
  result_code = $?.to_i
  if result_code != 0
    @@log.debug("fail #{result_code}")
    @preview_generate_error = "unable to execute mp3 convert command #{convert_output}"
    return
  end

  convert_aac_cmd = "#{APP_CONFIG.ffmpeg_path} -i \"#{output}\" -c:a libfdk_aac -b:a 192k \"#{output_aac}\""
  @@log.debug("converting to aac using: " + convert_aac_cmd)
  convert_output = `#{convert_aac_cmd}`
  result_code = $?.to_i
  if result_code != 0
    @@log.debug("fail #{result_code}")
    @preview_generate_error = "unable to execute aac convert command #{convert_output}"
    return
  end

  ogg_digest = ::Digest::MD5.file(output)
  mp3_digest = ::Digest::MD5.file(output_mp3)
  aac_digest = ::Digest::MD5.file(output_aac)
  self["preview_md5"] = ogg_md5 = ogg_digest.hexdigest
  self["preview_mp3_md5"] = mp3_md5 = mp3_digest.hexdigest
  # BUG FIX: previously assigned mp3_digest.hexdigest here, so the aac md5
  # column (and the content-addressed aac S3 key) carried the mp3 file's hash.
  self["preview_aac_md5"] = aac_md5 = aac_digest.hexdigest

  # BUG FIX: these debug lines called preview_filename with a single argument
  # while every upload below passes two — likely an ArgumentError unless
  # preview_filename defaults its first parameter; now consistent either way.
  @@log.debug("uploading ogg preview to #{self.preview_filename(ogg_md5, 'ogg')}")
  s3_public_manager.upload(self.preview_filename(ogg_md5, 'ogg'), output, content_type: 'audio/ogg', content_md5: ogg_digest.base64digest)
  @@log.debug("uploading mp3 preview to #{self.preview_filename(mp3_md5, 'mp3')}")
  s3_public_manager.upload(self.preview_filename(mp3_md5, 'mp3'), output_mp3, content_type: 'audio/mpeg', content_md5: mp3_digest.base64digest)
  @@log.debug("uploading aac preview to #{self.preview_filename(aac_md5, 'aac')}")
  s3_public_manager.upload(self.preview_filename(aac_md5, 'aac'), output_aac, content_type: 'audio/aac', content_md5: aac_digest.base64digest)

  # presumably suppresses an attached file-uploader callback on save — confirm
  self.skip_uploader = true

  original_ogg_preview_url = self["preview_url"]
  original_mp3_preview_url = self["preview_mp3_url"]
  original_aac_preview_url = self["preview_aac_url"]

  self["preview_url"] = self.preview_filename(ogg_md5, 'ogg')
  self["preview_length"] = File.new(output).size
  self["preview_mp3_url"] = self.preview_filename(mp3_md5, 'mp3')
  self["preview_mp3_length"] = File.new(output_mp3).size
  self["preview_aac_url"] = self.preview_filename(aac_md5, 'aac')
  self["preview_aac_length"] = File.new(output_aac).size
  self.save!

  # if all that worked, now delete old previews, if present
  begin
    s3_public_manager.delete(original_ogg_preview_url) if original_ogg_preview_url && original_ogg_preview_url != self["preview_url"]
    # BUG FIX: the next two comparisons previously read track["..."] — `track`
    # is undefined in this scope, so the bare rescue swallowed a NameError and
    # old mp3/aac previews were never actually deleted.
    s3_public_manager.delete(original_mp3_preview_url) if original_mp3_preview_url && original_mp3_preview_url != self["preview_mp3_url"]
    s3_public_manager.delete(original_aac_preview_url) if original_aac_preview_url && original_aac_preview_url != self["preview_aac_url"]
  rescue
    puts "UNABLE TO CLEANUP OLD PREVIEW URL"
  end
end
|
||||
|
||||
|
||||
private
|
||||
# Re-numbers this track's siblings so their position column is the contiguous
# range 1..n in their current order, saving only rows whose position actually
# changes. Returns the sibling count (0 when there is no parent jam_track).
def normalize_position
  parent = self.jam_track
  idx = 0
  if parent
    JamTrack.transaction do
      parent.jam_track_tracks.each do |sibling|
        idx += 1
        next if sibling.position == idx
        sibling.position = idx
        sibling.save(validate: false)
      end
    end
  end
  idx
end # normalize_position
|
||||
|
||||
end # class
|
||||
end # module
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
# Gem packaging metadata for the shared Lambda helper library.
Gem::Specification.new do |spec|
  spec.name        = "shared"
  spec.version     = "0.1.0"
  # NOTE(review): placeholder author/email — fill in before publishing
  spec.authors     = ["Your Name"]
  spec.email       = ["your_email@example.com"]

  spec.summary     = "Shared library for Lambda functions"
  spec.description = "A shared Ruby library for common functionality used by multiple AWS Lambda functions."
  spec.license     = "MIT"

  # Files to include in the gem: every lib/ source file plus the README
  spec.files = Dir.glob("lib/**/*.rb") + ["README.md"]

  # Entry point for the library
  spec.require_paths = ["lib"]

  # Dependencies
  spec.add_dependency "aws-sdk-s3", "~> 1.0"
  spec.add_dependency "json", "~> 2.0"

  # Development dependencies (optional)
  spec.add_development_dependency "rspec", "~> 3.0"
end
|
||||
|
||||
|
|
@ -0,0 +1,205 @@
|
|||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Transform: AWS::Serverless-2016-10-31
|
||||
|
||||
Parameters:
|
||||
Environment:
|
||||
Type: String
|
||||
AllowedValues:
|
||||
- dev
|
||||
- prod
|
||||
Description: The environment (e.g., dev or prod)
|
||||
TencyZipsBucket:
|
||||
Type: String
|
||||
Description: Where tency uploads their original zips
|
||||
TencyJamTracksBucket:
|
||||
Type: String
|
||||
Description: Where we unzip their zipped files
|
||||
EfsId:
|
||||
Type: String
|
||||
Description: The ID of the EFS to use for scratch
|
||||
VpcId:
|
||||
Type: String
|
||||
Description: The ID of the VPC where the Lambda function and EFS are deployed.
|
||||
SubnetIds:
|
||||
Type: CommaDelimitedList
|
||||
Description: The IDs of the subnets where the Lambda function will be deployed.
|
||||
SgIds:
|
||||
Type: CommaDelimitedList
|
||||
Description: The Id Of the security group
|
||||
MountPath:
|
||||
Type: String
|
||||
Description: The path to mount the EFS volume into the lambda
|
||||
JamTrackContainerPath:
|
||||
Type: String
|
||||
Description: The local or container registry path to the jamtrack container
|
||||
DbHost:
|
||||
Type: String
|
||||
Description: postgresql host
|
||||
DbUser:
|
||||
Type: String
|
||||
Description: postgresql user
|
||||
DbPass:
|
||||
Type: String
|
||||
Description: postgresql pass
|
||||
DbName:
|
||||
Type: String
|
||||
Description: db name
|
||||
AwsBucket:
|
||||
Type: String
|
||||
Description: aws bucket
|
||||
AwsBucketPublic:
|
||||
Type: String
|
||||
Description: aws bucket public
|
||||
|
||||
Globals:
|
||||
Function:
|
||||
#Runtime: ruby3.3
|
||||
CodeUri: ./
|
||||
Architectures:
|
||||
- x86_64
|
||||
#Handler: app.lambda_handler
|
||||
|
||||
Resources:
|
||||
TencyUnzipFunction:
|
||||
Type: AWS::Serverless::Function
|
||||
Properties:
|
||||
#Handler: lambdas/unzipper/app.lambda_handler
|
||||
#Runtime: ruby3.3
|
||||
# Layers:
|
||||
# - !Ref AssetLayer
|
||||
# - !Ref PGLayer
|
||||
# - !Ref SharedLayer
|
||||
MemorySize: 500
|
||||
Timeout: 900
|
||||
PackageType: Image
|
||||
ImageUri: !Ref JamTrackContainerPath
|
||||
FileSystemConfigs:
|
||||
- Arn: !GetAtt EFSMountTarget.Arn
|
||||
LocalMountPath: /mnt/efs
|
||||
VpcConfig:
|
||||
SubnetIds: !Ref SubnetIds
|
||||
SecurityGroupIds: !Ref SgIds
|
||||
Role: !GetAtt ZipExtractorFunctionRole.Arn
|
||||
Environment:
|
||||
Variables:
|
||||
ENV: !Ref Environment
|
||||
DB_HOST: !Ref DbHost
|
||||
DB_USER: !Ref DbUser
|
||||
DB_PASS: !Ref DbPass
|
||||
DB_NAME: !Ref DbName
|
||||
TENCY_ZIPS_BUCKET: !Ref TencyZipsBucket
|
||||
TENCY_JAMTRACKS_BUCKET: !Ref TencyJamTracksBucket
|
||||
AWS_BUCKET: !Ref AwsBucket
|
||||
AWS_BUCKET_PUBLIC: !Ref AwsBucketPublic
|
||||
FFMPEG_PATH: /opt/bin/ffmpeg
|
||||
FFMPEG_PATH_MP3: /opt/bin/ffmpeg
|
||||
JMEP_DIR: /var/task/shared/jmep
|
||||
END_ON_FAIL: 1
|
||||
MOUNT_PATH: !Ref MountPath
|
||||
# AssetLayer:
|
||||
# Type: AWS::Serverless::LayerVersion
|
||||
# Properties:
|
||||
# LayerName: asset-layer
|
||||
# ContentUri: s3://jamkazam-repo/lambda-assets/jamtrack-importer-assets.zip
|
||||
# ContentUri: ./assets/assets.zip
|
||||
# CompatibleRuntimes:
|
||||
# - ruby3.3
|
||||
# PGLayer:
|
||||
# Type: AWS::Serverless::LayerVersion
|
||||
# Properties:
|
||||
# CompatibleRuntimes:
|
||||
# - ruby3.3
|
||||
# ContentUri: 'pg_layer'
|
||||
# Metadata:
|
||||
# BuildMethod: makefile
|
||||
|
||||
# SharedLayer:
|
||||
# Type: AWS::Serverless::LayerVersion
|
||||
# Properties:
|
||||
# LayerName: shared-layer
|
||||
# Description: Shared code layer for Lambda functions
|
||||
# ContentUri: shared/
|
||||
# CompatibleRuntimes:
|
||||
# - ruby3.3
|
||||
|
||||
# IAM Role for Lambda
|
||||
ZipExtractorFunctionRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Version: "2012-10-17"
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: lambda.amazonaws.com
|
||||
Action: sts:AssumeRole
|
||||
ManagedPolicyArns:
|
||||
- !Ref EFSFullAccessPolicy
|
||||
- "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
|
||||
Policies:
|
||||
- PolicyName: S3AccessPolicy
|
||||
PolicyDocument:
|
||||
Version: "2012-10-17"
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- "s3:ListBucket"
|
||||
- "s3:GetObject"
|
||||
Resource:
|
||||
- !Sub "arn:aws:s3:::${TencyZipsBucket}"
|
||||
- !Sub "arn:aws:s3:::${TencyZipsBucket}/*"
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- "s3:ListBucket"
|
||||
- "s3:GetObject"
|
||||
- "s3:PutObject"
|
||||
Resource:
|
||||
- !Sub "arn:aws:s3:::${TencyJamTracksBucket}"
|
||||
- !Sub "arn:aws:s3:::${TencyJamTracksBucket}/*"
|
||||
- PolicyName: VPCNetworkingPolicy
|
||||
PolicyDocument:
|
||||
Version: "2012-10-17"
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- "ec2:CreateNetworkInterface"
|
||||
- "ec2:DescribeNetworkInterfaces"
|
||||
- "ec2:DeleteNetworkInterface"
|
||||
Resource: "*" # You can narrow this down by specifying resources if needed
|
||||
# Managed Policy for EFS Access
|
||||
EFSFullAccessPolicy:
|
||||
Type: AWS::IAM::ManagedPolicy
|
||||
Properties:
|
||||
Description: EFS full access for Lambda
|
||||
PolicyDocument:
|
||||
Version: "2012-10-17"
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- "elasticfilesystem:ClientMount"
|
||||
- "elasticfilesystem:ClientWrite"
|
||||
Resource: !GetAtt EFSMountTarget.Arn
|
||||
|
||||
|
||||
# EFS Mount Target
|
||||
# MountTaget1:
|
||||
# Type: AWS::EFS::MountTarget
|
||||
# Properties:
|
||||
# FileSystemId: !Ref EfsId
|
||||
# SubnetId: !Select [0, !Ref SubnetIds] # us-east1
|
||||
# SecurityGroups: [!Ref SgId]
|
||||
|
||||
# EFS Mount Target for Lambda
|
||||
EFSMountTarget:
|
||||
Type: AWS::EFS::AccessPoint
|
||||
Properties:
|
||||
FileSystemId: !Ref EfsId
|
||||
PosixUser:
|
||||
Uid: "1000"
|
||||
Gid: "1000"
|
||||
RootDirectory:
|
||||
CreationInfo:
|
||||
OwnerUid: "1000"
|
||||
OwnerGid: "1000"
|
||||
Permissions: "0777"
|
||||
Path: !Ref MountPath
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue