Cloudflare R2

livebooks/cloudflare/r2.livemd

Mix.install([
  {:ex_aws, "~> 2.5"},
  {:ex_aws_s3, "~> 2.4"},
  {:poison, "~> 5.0"},
  {:hackney, "~> 1.20"},
  {:sweet_xml, "~> 0.7"},
  {:explorer, "~> 0.9"},
  {:evision, "~> 0.2"},
  {:req, "~> 0.5"},
  {:kino, "~> 0.14"}
])

Preparation

alias ExAws.S3
alias Explorer.DataFrame
alias Explorer.Series
require Explorer.DataFrame

Authentication

access_key_id_input = Kino.Input.password("ACCESS_KEY_ID")
secret_access_key_input = Kino.Input.password("SECRET_ACCESS_KEY")
endpoint_host_input = Kino.Input.text("ENDPOINT_HOST")

[
  access_key_id_input,
  secret_access_key_input,
  endpoint_host_input
]
|> Kino.Layout.grid(columns: 3)

auth_config = [
  access_key_id: Kino.Input.read(access_key_id_input),
  secret_access_key: Kino.Input.read(secret_access_key_input),
  host: Kino.Input.read(endpoint_host_input)
]

Kino.nothing()
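
The three inputs are merged into a keyword list that is passed to each ExAws call as per-request configuration. As a point of reference, the sketch below shows the assumed shape of that list with hypothetical placeholder values; R2's S3-compatible endpoint host is usually of the form <account_id>.r2.cloudflarestorage.com.

# A hedged sketch of the expected shape of auth_config (placeholder values).
# R2's S3-compatible endpoint is usually "<account_id>.r2.cloudflarestorage.com".
[
  access_key_id: "YOUR_R2_ACCESS_KEY_ID",
  secret_access_key: "YOUR_R2_SECRET_ACCESS_KEY",
  host: "your-account-id.r2.cloudflarestorage.com"
]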

Listing buckets

# Inspect the raw response: request/2 returns an {:ok, result} tuple
S3.list_buckets()
|> ExAws.request(auth_config)

# Build a DataFrame from the parsed bucket list
S3.list_buckets()
|> ExAws.request!(auth_config)
|> then(& &1.body.buckets)
|> DataFrame.new()
|> DataFrame.select(["name", "creation_date"])
|> Kino.DataTable.new()
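
The first cell above uses ExAws.request/2, which returns an {:ok, result} or {:error, reason} tuple, while the second uses ExAws.request!/2, which raises on failure. A minimal sketch of handling the non-raising variant explicitly, assuming each bucket entry carries a :name key as the DataFrame above suggests:

# Hedged sketch: pattern match on the result of the non-raising request/2.
case ExAws.request(S3.list_buckets(), auth_config) do
  {:ok, %{body: %{buckets: buckets}}} ->
    Enum.map(buckets, & &1.name)

  {:error, reason} ->
    IO.inspect(reason, label: "list_buckets failed")
end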

Listing files

bucket_name_input = Kino.Input.text("BUCKET_NAME")

# Fetch the first page (up to 10 objects) and its continuation token
{contents, next_continuation_token} =
  bucket_name_input
  |> Kino.Input.read()
  |> S3.list_objects_v2(max_keys: 10)
  |> ExAws.request!(auth_config)
  |> then(&{&1.body.contents, &1.body.next_continuation_token})

# Fetch the next page using the continuation token from the previous request
bucket_name_input
|> Kino.Input.read()
|> S3.list_objects_v2(max_keys: 10, continuation_token: next_continuation_token)
|> ExAws.request!(auth_config)
|> then(&{&1.body.contents, &1.body.next_continuation_token})

# Helper module that follows continuation tokens to list every object in a bucket
defmodule S3LS do
  def get_contents(continuation_token, bucket_name, auth_config) do
    bucket_name
    |> S3.list_objects_v2(max_keys: 10, continuation_token: continuation_token)
    |> ExAws.request!(auth_config)
    |> then(&{&1.body.contents, &1.body.next_continuation_token})
  end

  def get_contents_cyclic(continuation_token, bucket_name, auth_config) do
    {contents, next_token} = get_contents(continuation_token, bucket_name, auth_config)

    case next_token do
      # If the token is empty, there are no more pages to fetch
      "" ->
        contents

      # Otherwise fetch the next page and append its contents
      _ ->
        contents ++ get_contents_cyclic(next_token, bucket_name, auth_config)
    end
  end

  def get_all_contents(bucket_name, auth_config) do
    get_contents_cyclic(nil, bucket_name, auth_config)
  end
end
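
As an alternative to paginating by hand, ExAws can follow list pagination itself. A hedged sketch using ExAws.stream!/2 with S3.list_objects/2 (streaming support for list_objects_v2 may depend on the ex_aws_s3 version):

# Hedged alternative: ExAws.stream!/2 lazily pages through the listing.
bucket_name_input
|> Kino.Input.read()
|> S3.list_objects()
|> ExAws.stream!(auth_config)
|> Enum.take(5)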

all_contents =
  bucket_name_input
  |> Kino.Input.read()
  |> S3LS.get_all_contents(auth_config)

all_contents_df =
  all_contents
  |> DataFrame.new()
  |> DataFrame.select(["key", "last_modified", "size"])
  |> DataFrame.mutate(size: cast(size, :float))
  |> then(fn df ->
    DataFrame.put(
      df,
      "last_modified",
      df["last_modified"]
      |> Series.transform(fn input ->
        NaiveDateTime.from_iso8601!(input)
      end)
    )
  end)

all_contents_df
|> Kino.DataTable.new()

Series.sum(all_contents_df["size"])

Series.max(all_contents_df["size"])

all_contents_df
|> DataFrame.filter(size > 20_000)
|> Kino.DataTable.new()

all_contents_df
|> DataFrame.filter(last_modified < ~N[2024-02-09 05:43:42])
|> Kino.DataTable.new()

all_contents_df
|> then(fn df ->
  DataFrame.put(
    df,
    "dir",
    df["key"]
    |> Series.transform(fn input ->
      paths = String.split(input, "/")

      case Enum.count(paths) do
        1 ->
          ""

        _ ->
          Enum.at(paths, 0)
      end
    end)
  )
end)
|> DataFrame.group_by(["dir"])
|> DataFrame.summarise(
  count: count(size),
  size: sum(size)
)
|> DataFrame.sort_by(desc: size)
|> Kino.DataTable.new()
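
Because the size column holds raw byte counts, a derived column can make the table easier to scan. A small sketch (assumed conversion of 1 MB = 1_000_000 bytes):

# Hedged sketch: derive a size_mb column from the byte count before display.
all_contents_df
|> DataFrame.mutate(size_mb: size / 1_000_000)
|> DataFrame.select(["key", "size_mb"])
|> Kino.DataTable.new()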

File upload

Fetching a file to upload

ryo_path = "ryo-wakabayashi.jpg"

"https://www.elixirconf.eu/assets/images/ryo-wakabayashi.jpg"
|> Req.get!(into: File.stream!(ryo_path))

Uploading from a file

ryo_path
|> S3.Upload.stream_file()
|> S3.upload(Kino.Input.read(bucket_name_input), "ryo-wakabayashi.jpg")
|> ExAws.request!(auth_config)

Uploading from memory

mat = Evision.imread(ryo_path)
bucket_name_input
|> Kino.Input.read()
|> S3.put_object("ryo_2.jpg", Evision.imencode(".jpg", mat))
|> ExAws.request!(auth_config)
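
put_object/4 also accepts request options such as :content_type, which sets the Content-Type stored with the object. A hedged variant of the in-memory upload:

# Hedged variant: set the Content-Type explicitly when uploading from memory.
bucket_name_input
|> Kino.Input.read()
|> S3.put_object("ryo_2.jpg", Evision.imencode(".jpg", mat), content_type: "image/jpeg")
|> ExAws.request!(auth_config)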

# List the bucket again to confirm the uploaded objects
bucket_name_input
|> Kino.Input.read()
|> S3LS.get_all_contents(auth_config)
|> DataFrame.new()
|> DataFrame.select(["key", "last_modified", "size"])
|> Kino.DataTable.new()

File download

bucket_name_input
|> Kino.Input.read()
|> S3.download_file("ryo-wakabayashi.jpg", "ryo_downloaded.jpg")
|> ExAws.request!(auth_config)

# Read the downloaded file back to confirm it is a valid image
mat = Evision.imread("ryo_downloaded.jpg")

Downloading into memory

bucket_name_input
|> Kino.Input.read()
|> S3.get_object("ryo-wakabayashi.jpg")
|> ExAws.request!(auth_config)
|> then(&Evision.imdecode(&1.body, Evision.Constant.cv_IMREAD_COLOR()))

Downloading, processing, and re-uploading

bucket_name = Kino.Input.read(bucket_name_input)

bucket_name
|> S3.get_object("ryo-wakabayashi.jpg")
|> ExAws.request!(auth_config)
|> then(&Evision.imdecode(&1.body, Evision.Constant.cv_IMREAD_COLOR()))
|> Evision.blur({9, 9})
|> then(&S3.put_object(bucket_name, "ryo_blur.jpg", Evision.imencode(".jpg", &1)))
|> ExAws.request!(auth_config)

# Download the blurred image again to verify the result
bucket_name_input
|> Kino.Input.read()
|> S3.get_object("ryo_blur.jpg")
|> ExAws.request!(auth_config)
|> then(&Evision.imdecode(&1.body, Evision.Constant.cv_IMREAD_COLOR()))
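
To share the processed image without handing out R2 credentials, a presigned URL can be built from the same configuration. A hedged sketch using ExAws.Config.new/2 and S3.presigned_url/5 (expiry in seconds); whether the URL is honoured depends on your R2 setup:

# Hedged sketch: build a presigned GET URL for the blurred image.
config = ExAws.Config.new(:s3, auth_config)

{:ok, presigned_url} =
  S3.presigned_url(config, :get, Kino.Input.read(bucket_name_input), "ryo_blur.jpg",
    expires_in: 3600
  )

presigned_url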