class ElasticTranscoder extends AnyRef
- Source
- ElasticTranscoder.scala
- Alphabetic
- By Inheritance
- ElasticTranscoder
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
- new ElasticTranscoder()
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- def allPresets: List[Preset]
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
cancelJobs(pipeline: Pipeline, outputName: String = ""): Unit
Tries to cancel all jobs that match
outputName
, if specified. Tries to cancel all jobs that match
outputName
, if specified. Errors are ignored. Only jobs with a status of Submitted
can be cancelled. Race conditions are possible; jobs might start being processed between the time they are listed and the time that cancellation is attempted. This means that some jobs might not be cancelled. This is a synchronous call. -
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @HotSpotIntrinsicCandidate() @throws( ... )
-
def
createJob(pipelineId: String, presets: List[Preset], inKey: String, outputKeyPrefix: String, outputKeys: List[String])(implicit etClient: AmazonElasticTranscoder): Job
Create an Elastic Transcoder job using the given pipeline, input key, presets, output key prefix and output keys.
Create an Elastic Transcoder job using the given pipeline, input key, presets, output key prefix and output keys.
- returns
Job created in Elastic Transcoder.
-
def
createJob(bucket: Bucket, pipelineId: String, inputKey: String, outputKey: String, presetId: String)(implicit s3: S3): Try[Job]
Deletes any pre-existing output file before starting the Job.
Deletes any pre-existing output file before starting the Job. If this output file was being served directly, in other words there is no CloudFront distribution for the output bucket, the output file will be unavailable until the job completes. If the job fails, there is no output file. TODO listen for job completion and if the output file existed previously, invalidate the output file so CloudFront can distribute the new version.
- def createPipeline(name: String, inputBucket: Bucket, outputBucket: Bucket): Try[Pipeline]
- def createPipelines(pipelineNames: List[String], inputBucket: Bucket, outputBucket: Bucket): List[Try[Pipeline]]
- def defaultPresets: List[Preset]
- def deletePipeline(pipeline: Pipeline): Unit
-
def
deletePipelines(): Unit
Deletes all pipelines associated with this AWS account.
Deletes all pipelines associated with this AWS account. Errors are ignored. When performing multiple deletions, a 300ms pause is inserted between requests. This is a synchronous call.
- def dumpPipelines(): Unit
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- implicit val et: ElasticTranscoder
- implicit val etClient: AmazonElasticTranscoder
-
def
findCompletedJobByInputKeyName(key: String, pipelineName: String): Option[Job]
Find most recent job for specified key
-
def
findJobByInputKeyName(key: String, pipelineName: String): Option[Job]
Find most recent job for specified key
-
def
findJobByOutputKeyName(key: String, pipelineName: String): Option[Job]
Find most recent job for specified key
- def findJobByPipelineIdAndId(pipelineId: String, jobId: String): Option[Job]
-
def
findPipelineById(pipelineId: String): Option[Pipeline]
Returns Some(pipeline) for the first pipeline found with the given pipelineId associated with this AWS account, or None if not found
-
def
findPipelineByName(pipelineName: String): Option[Pipeline]
Pipeline names need not be unique.
Pipeline names need not be unique. Returns Some(pipeline) for the first pipeline found with the given pipelineName associated with this AWS account, or None if not found
-
def
findPipelinesByName(pipelineName: String): List[Pipeline]
Pipeline names need not be unique.
Pipeline names need not be unique. Returns List[Pipeline] containing all pipelines with the given pipelineName associated with this AWS account
- def findPresetByName(name: String): Option[Preset]
- def findPresets(name: String): List[Preset]
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @HotSpotIntrinsicCandidate()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @HotSpotIntrinsicCandidate()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def isJobInProgress(pipeline: Pipeline, outputName: String): Boolean
- def jobOutputs(status: String): List[JobOutput]
- def jobStatuses(pipeline: Pipeline): List[JobOutput]
- def jobs(pipeline: Pipeline): List[Job]
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @HotSpotIntrinsicCandidate()
-
def
pipelineExists(pipelineName: String): Boolean
Note that pipeline names need not be unique.
Note that pipeline names need not be unique. Returns true if a pipeline is found for this AWS account with the given pipelineName
- def presetCache: Map[String, List[Preset]]
- def presetsToHtml: Seq[String]
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
def
transcode(pipelineId: String, inputKey: String, outputKey: String)(implicit s3: S3): List[Try[String]]
- returns
list of job output keys
- def transcoderStatus(videoName: String, pipelineName: String): String
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )