動画の長さに合わせて無音ファイルをカット
$ ffmpeg -ss 0 -i mute30.wav -t 16.45 mute.wav
無音ファイルと動画を結合
$ ffmpeg -i in.mp4 -i mute.wav -vf scale=886:1920,setsar=1:1 out.mp4
import Darwin.POSIX.iconv | |
class Iconv { | |
enum Error: Swift.Error { | |
case illegalByteSequence | |
case invalidArgument | |
case invalidResult | |
case unknownError(errno: Int32) | |
} |
extension ObservableType where Element == Void { | |
func pagenation< | |
Token, | |
Source: ObservableConvertibleType, | |
PagingElement | |
>( | |
startWith token: Token, | |
selector: @escaping (Token) throws -> Source | |
) -> Observable<(usedToken: Token, PagingElement)> | |
where |
/// Demonstrates that a stored closure can resolve the *dynamic* type's
/// `className` (a subclass overriding `className` is reflected at call time).
///
/// The original implementation wrote `name = { return Self.className }`.
/// Referencing dynamic `Self` inside the closure implicitly captures `self`
/// strongly, and `self` stores that closure in `name` — a retain cycle that
/// leaks every instance on which `setup()` was called. The fix captures the
/// metatype by value instead, which preserves the dynamic-dispatch behavior
/// (an instance's dynamic type never changes) without capturing `self`.
class A {
    /// Type-level name; subclasses override to report their own type.
    class var className: String { "A" }

    init() {}

    /// Installs the `name` closure.
    func setup() {
        // Capture the dynamic metatype, not `self`, to avoid the
        // `self -> name -> self` strong reference cycle.
        let dynamicType = type(of: self)
        name = { dynamicType.className }
    }

    /// Set by `setup()`; returns the dynamic type's `className`.
    var name: (() -> String)?
}
import DiffableDataSources | |
import UIKit | |
class CollectionViewConcatDataSource<SectionIdentifierType: Hashable>: NSObject, UICollectionViewDataSource { | |
typealias ItemIdentifierType = AnyHashable | |
public typealias SupplementaryViewProvider = (UICollectionView, String, IndexPath) -> UICollectionReusableView? | |
private var innerDataSource: CollectionViewDiffableDataSource<SectionIdentifierType, AnyHashable>! | |
private var adapterMap: [SectionIdentifierType: DiffableDataSourceSectionAdapterType] = [:] | |
var defaultSupplementaryViewProvider: SupplementaryViewProvider? = nil |
import Foundation | |
public struct Airport: Codable { | |
let name: String | |
let iata: String | |
let icao: String | |
let coordinates: [Double] | |
public struct Runway: Codable { | |
enum Surface: String, Codable { |
サーバ日付 | id | value | 備考
---|---|---|---
2021/03/24 11:03:04 | testing4 | 0 |
2021/03/24 11:03:04 | testing1 | 0 |
2021/03/24 11:04:07 | testing4 | 1 |
2021/03/24 11:04:07 | testing1 | 1 |
2021/03/24 11:04:08 | testing2 | 1 |
2021/03/24 11:05:11 | testing1 | 2 | 私物iPadでスケジュール中。電源に接続されたりされてなかったり、他のアプリを起動してたりしてなかったりする
2021/03/24 11:05:11 | testing4 | 2 |
2021/03/24 11:05:12 | testing2 | 2 |
2021/03/24 11:05:13 | testing3 | 2 | 〜ここでバックグラウンドにした〜
import Combine | |
@_functionBuilder | |
struct FlatMapBuilder { | |
static func buildBlock<C: Publisher>(_ component: C) -> C | |
{ | |
component | |
} | |
static func buildEither<F: Publisher, S: Publisher>(first component: F) -> EitherPublisher<F, S> |
#if !canImport(ObjectiveC) | |
import Foundation | |
import XCTest | |
// INFO: | |
//typealias XCTestCaseClosure = (XCTestCase) throws -> Void | |
//typealias XCTestCaseEntry = (testCaseClass: XCTestCase.Type, allTests: [(String, XCTestCaseClosure)]) | |
func testCase<T: XCTestCase>(_ allTests: [(String, (T) -> () async throws -> Void)]) -> XCTestCaseEntry { | |
let tests: [(String, XCTestCaseClosure)] = allTests.map { ($0.0, asyncTest($0.1)) } | |
return (T.self, tests) |
import SwiftUI | |
func longlongAsyncFunction() async { | |
let id = UUID() | |
print("\(id): start") | |
do { | |
try await withTaskCancellationHandler { | |
try await Task.sleep(nanoseconds: 1000 * 1000 * 1000 * 10) | |
print("\(id): end") | |
} onCancel: { |
動画の長さに合わせて無音ファイルをカット
$ ffmpeg -ss 0 -i mute30.wav -t 16.45 mute.wav
無音ファイルと動画を結合
$ ffmpeg -i in.mp4 -i mute.wav -vf scale=886:1920,setsar=1:1 out.mp4