
test(project): 72.9% coverage; 60% passing.

tags/v0.0.1^2
jackyalcine committed 1 year ago
commit 7a8f26c657
Signed by: me <yo@jacky.wtf> GPG Key ID: 36CD7728BDFD66FF
100 changed files with 3194 additions and 1757 deletions
  1. .credo.exs (+138, -0)
  2. .drone.yml (+24, -0)
  3. .editorconfig (+3, -2)
  4. .formatter.exs (+2, -2)
  5. .lvimrc (+1, -1)
  6. .node-version (+1, -0)
  7. .projections.json (+27, -3)
  8. Dockerfile (+1, -1)
  9. INSTALL.markdown (+13, -0)
  10. README.markdown (+8, -4)
  11. docker/scripts/build.sh (+1, -1)
  12. docker/scripts/post-deploy.sh (+0, -3)
  13. docker/scripts/pre-deploy.sh (+3, -0)
  14. docker/scripts/prepare.sh (+7, -1)
  15. lib/application.ex (+1, -3)
  16. lib/cache.ex (+1, -1)
  17. lib/format.ex (+18, -0)
  18. lib/http.ex (+33, -23)
  19. lib/indieweb/app.ex (+4, -5)
  20. lib/indieweb/app/h_x_app.ex (+12, -15)
  21. lib/indieweb/app/link.ex (+20, -31)
  22. lib/indieweb/app/mf2.ex (+0, -60)
  23. lib/indieweb/app/opengraph.ex (+3, -4)
  24. lib/indieweb/app/schemaorg.ex (+3, -2)
  25. lib/indieweb/app/webmanifest.ex (+3, -2)
  26. lib/indieweb/auth/scope.ex (+1, -1)
  27. lib/indieweb/jf2.ex (+0, -130)
  28. lib/indieweb/mf2.ex (+1, -7)
  29. lib/indieweb/mf2/remote.ex (+12, -14)
  30. lib/indieweb/micropub.ex (+7, -5)
  31. lib/indieweb/micropub/content.ex (+5, -12)
  32. lib/indieweb/micropub/entry.ex (+229, -143)
  33. lib/indieweb/post.ex (+171, -178)
  34. lib/indieweb/relme.ex (+12, -1)
  35. lib/indieweb/webmention.ex (+23, -28)
  36. lib/page.ex (+14, -13)
  37. lib/profile.ex (+1, -12)
  38. lib/repo.ex (+10, -7)
  39. lib/repo/category.ex (+2, -3)
  40. lib/repo/entry.ex (+71, -60)
  41. lib/repo/relme.ex (+3, -14)
  42. lib/repo/setting.ex (+1, -6)
  43. lib/repo/webmention.ex (+10, -14)
  44. lib/storage.ex (+122, -159)
  45. lib/storage/image.ex (+43, -0)
  46. lib/storage/json.ex (+80, -0)
  47. lib/storage/object_data.ex (+0, -154)
  48. lib/storage/photo.ex (+0, -82)
  49. lib/storage/video.ex (+17, -7)
  50. lib/web.ex (+24, -0)
  51. mix.exs (+6, -3)
  52. mix.lock (+1, -1)
  53. priv/repo/migrations/20181218212555_create_webmentions.exs (+0, -1)
  54. priv/repo/migrations/20181218221941_send_only_one_webmention_per_source_and_target.exs (+1, -1)
  55. priv/repo/migrations/20181221203932_use_type_instead_of_post_type_in_entries.exs (+7, -0)
  56. priv/repo/migrations/20181221234809_add_deleted_at_to_entries.exs (+9, -0)
  57. priv/repo/migrations/20181221234937_add_deleted_at_to_webmentions.exs (+9, -0)
  58. test/fixtures/vcr_cassettes/attempts_successful_webmention_send.json (+0, -40)
  59. test/fixtures/vcr_cassettes/fails_to_fetch_due_to_network_error.json (+46, -0)
  60. test/fixtures/vcr_cassettes/fetch_force_refresh.json (+0, -0)
  61. test/fixtures/vcr_cassettes/fetch_from_page.json (+47, -0)
  62. test/fixtures/vcr_cassettes/fetches_cache_invalid.json (+0, -0)
  63. test/fixtures/vcr_cassettes/page_is_empty.json (+46, -0)
  64. test/fixtures/vcr_cassettes/refresh_of_mf2_data.json (+49, -0)
  65. test/fixtures/vcr_cassettes/relme_failing_endpoint.json (+0, -0)
  66. test/fixtures/vcr_cassettes/relme_redirecting_endpoint.json (+0, -0)
  67. test/fixtures/vcr_cassettes/sends_successfully.json (+0, -1)
  68. test/integration/controllers/auth_controller_test.exs (+11, -4)
  69. test/integration/controllers/category_controller_test.exs (+4, -9)
  70. test/integration/controllers/entry_controller_test.exs (+16, -6)
  71. test/support/factory.ex (+186, -83)
  72. test/test_helper.exs (+4, -3)
  73. test/unit/http_test.exs (+74, -7)
  74. test/unit/indieweb/app/h_x_app_test.exs (+56, -5)
  75. test/unit/indieweb/app/link_test.exs (+63, -0)
  76. test/unit/indieweb/app/mf2_test.exs (+0, -19)
  77. test/unit/indieweb/auth/code_test.exs (+1, -2)
  78. test/unit/indieweb/auth/scope_test.exs (+22, -3)
  79. test/unit/indieweb/jf2_test.exs (+0, -84)
  80. test/unit/indieweb/mf2/remote_test.exs (+49, -45)
  81. test/unit/indieweb/mf2_test.exs (+13, -0)
  82. test/unit/indieweb/micropub/content_test.exs (+12, -4)
  83. test/unit/indieweb/micropub/entry_test.exs (+586, -13)
  84. test/unit/indieweb/micropub_test.exs (+27, -0)
  85. test/unit/indieweb/post_test.exs (+252, -52)
  86. test/unit/indieweb/relme_test.exs (+61, -6)
  87. test/unit/indieweb/webmention_test.exs (+21, -35)
  88. test/unit/jackywtf_test.exs (+0, -59)
  89. test/unit/koype_test.exs (+12, -0)
  90. test/unit/page_test.exs (+45, -0)
  91. test/unit/profile_test.exs (+5, -0)
  92. test/unit/repo/category_test.exs (+1, -0)
  93. test/unit/repo/entry_json_test.exs (+13, -0)
  94. test/unit/repo/entry_test.exs (+4, -4)
  95. test/unit/repo/setting_test.exs (+20, -2)
  96. test/unit/repo/webmention_test.exs (+68, -6)
  97. test/unit/storage/image_test.exs (+40, -0)
  98. test/unit/storage/json_test.exs (+81, -0)
  99. test/unit/storage/object_data_test.exs (+0, -45)
  100. test/unit/storage/video_test.exs (+40, -0)

.credo.exs (+138, -0)

@@ -0,0 +1,138 @@
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
included: ["lib/", "web/", "test/"],
excluded: [~r"/_build/", ~r"/deps/"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
requires: [],
#
# Credo automatically checks for updates, like e.g. Hex does.
# You can disable this behaviour below:
check_for_updates: true,
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
strict: true,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.MultiAliasImportRequireUse},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses},
{Credo.Check.Consistency.TabsOrSpaces},

# For some checks, like AliasUsage, you can only customize the priority
# Priority values are: `low, normal, high, higher`
{Credo.Check.Design.AliasUsage, priority: :low},

# For others you can set parameters

# If you don't want the `setup` and `test` macro calls in ExUnit tests
# or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
# set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
{Credo.Check.Design.DuplicatedCode, excluded_macros: []},

# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagFIXME},

{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 100},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc, false},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},

{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart},
{Credo.Check.Refactor.UnlessWithElse},

{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.LazyLogging},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},

# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, true},
{Credo.Check.Refactor.VariableRebinding, true},
{Credo.Check.Warning.MapGetUnsafePass, false},

# Deprecated checks (these will be deleted after a grace period)
{Credo.Check.Readability.Specs, false},
{Credo.Check.Warning.NameRedeclarationByAssignment, false},
{Credo.Check.Warning.NameRedeclarationByCase, false},
{Credo.Check.Warning.NameRedeclarationByDef, false},
{Credo.Check.Warning.NameRedeclarationByFn, false},

# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}

.drone.yml (+24, -0)

@@ -10,6 +10,30 @@ services:
image: "redis:5-alpine"

steps:
- name: webdriver
image: selenium/hub:3.141.59-copernicium
detach: true
ports:
- "4444:4444"

- name: chrome
image: selenium/node-chrome:3.141.59-copernicium
detach: true
depends_on:
- webdriver
environment:
HUB_HOST: webdriver
HUB_PORT: 4444

- name: firefox
image: selenium/node-firefox:3.141.59-copernicium
depends_on:
- webdriver
environment:
HUB_HOST: webdriver
HUB_PORT: 4444
detach: true

- name: objectstorage
image: "minio/minio:RELEASE.2018-11-22T02-51-56Z"
environment:


.editorconfig (+3, -2)

@@ -6,10 +6,11 @@ indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
line_length = 80

[*.markdown,*.md]
line_length = 80

[*.ex,*.exs]
line_length = 80
max_line_length = 100
line_length = 100
max_line_length = 120

.formatter.exs (+2, -2)

@@ -1,4 +1,4 @@
[
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"],
line_length: 100,
inputs: ["mix.exs", "{web,config,lib,test}/**/*.{ex,exs}"],
line_length: 120,
]

.lvimrc (+1, -1)

@@ -5,7 +5,7 @@

DirenvExport

let s:command_prefix = "docker-compose run -e OBJECT_STORAGE_ASSET_HOST='http://localhost:4001/koype-test' -e MIX_ENV=test --no-deps --rm site"
let s:command_prefix = 'docker-compose run -e OBJECT_STORAGE_ASSET_HOST="http://localhost:4001/koype-test" -e MIX_ENV=test --no-deps --rm site'

function! DockerComposeTransform(cmd) abort
return s:command_prefix . ' ' . a:cmd


.node-version (+1, -0)

@@ -0,0 +1 @@
8.14.0

.projections.json (+27, -3)

@@ -1,10 +1,34 @@
{
"lib/web/controllers/*.ex": {
"lib/web/controllers/**/*.ex": {
"alternative": "test/integration/controllers/{}_test.exs",
"type": "test"
"type": "source"
},
"lib/web/views/*.ex": {
"lib/web/views/**/*.ex": {
"alternative": "test/integration/views/{}_test.exs",
"type": "source"
},
"lib/**/*.ex": {
"alternative": "test/unit/{}.ex",
"type": "source"
},
"lib/repo/*.ex": {
"alternative": "test/unit/repo/{}.ex",
"type": "source"
},
"test/integration/controllers/**/*_test.exs": {
"alternative": "web/controllers/{}.ex",
"type": "test"
},
"test/integration/views/**/*_test.exs": {
"alternative": "web/views/{}.ex",
"type": "test"
},
"test/unit/**/*_test.exs": {
"alternative": "lib/{}.ex",
"type": "test"
},
"*": {
"make": "mix",
"start": "mix phx.server"
}
}

Dockerfile (+1, -1)

@@ -18,9 +18,9 @@ RUN sh /tmp/koype-docker/prepare.sh
WORKDIR /opt/koype

COPY . /opt/koype/
VOLUME /opt/koype/priv/repo/db
RUN sh /tmp/koype-docker/build.sh

VOLUME /opt/koype/priv/repo/db
RUN sh /tmp/koype-docker/cleanup.sh

SHELL ["/bin/bash"]


INSTALL.markdown (+13, -0)

@@ -7,17 +7,30 @@ own their identity online. Or something like that, I'm still working on it.

* [Redis][] 5.0.0
* [Elixir][] 1.6.6
* [SQLite][] 3.24.0

## Development

### Using Docker Compose

Before getting into [Docker Compose][1], you'll need to create the environment
variable file to prime your system for use. You can do this by running `cp .env.example .env`.

Since Koype is currently tailored to run in a Docker environment, the
ideal tool for local development is [Docker Compose][1]. Check out its
documentation for instructions on how to install it. Once you've got it installed,
run `docker-compose up` in the project directory.

Looking for a copy-paste friendly one-liner?

```sh
cp .env.example .env
pip install docker-compose
docker-compose up
```

[1]: https://docs.docker.com/compose/install/
[2]: http://phoenixframework.org/
[redis]: https://redis.io/
[elixir]: https://elixir-lang.org/
[sqlite]: https://sqlite.org/

README.markdown (+8, -4)

@@ -1,16 +1,20 @@
# new.jacky.wtf
# Koype

[![Build Status](https://ci.jacky.wtf/api/badges/indieweb/koype/status.svg)](https://ci.jacky.wtf/indieweb/koype)

This represents the foundation of my next-generation website. It's written
from the ground-up by hand with a plug-in design but purely for my use.
I plan to run this on my host machine using Dokku.
Koype is a server-side Web app that provides people with the ability to [own their data][4] and still interact with [other forms of social media][5]. The notion with
Koype is that everything you post is _your data_ and you are your own source of truth.

Setup information is in [`INSTALL`][2].

General documentation can be found in [`docs/`][3].

### Licensing

This project's source code is licensed under the [AGPL3][1].

[1]: https://choosealicense.com/licenses/agpl-3.0/
[2]: ./INSTALL.markdown
[3]: ./docs
[4]: https://indieweb.org/ownyourdata
[5]: https://indieweb.org/silo

docker/scripts/build.sh (+1, -1)

@@ -5,7 +5,7 @@ HEX_HTTP_TIMEOUT=600
NODE_ENV=${ENV}

echo " ---> [npm] Pulling dependencies..."
npm install --no-bin-links || exit 50
npm install --verbose --no-bin-links || exit 50

echo " ---> [mix] Preparing..."
mix local.hex --force || exit 10


docker/scripts/post-deploy.sh (+0, -3)

@@ -5,6 +5,3 @@ touch "/opt/koype/priv/repo/db/${MIX_ENV:-prod}.db" || exit 15

echo " ----> [deploy:post] Setting up database..."
mix ecto.setup || exit 20

echo " ----> [deploy:post] Update static digests..."
mix phx.digest || exit 40

docker/scripts/pre-deploy.sh (+3, -0)

@@ -14,3 +14,6 @@ echo " ----> [deploy:pre] <skip> Confirm permissions on object storage..."

echo " ----> [deploy:pre] <skip> Run production-ready checks... "
# mix koype.smoke_test || exit 60

echo " ----> [deploy:pre] Update static digests..."
mix phx.digest || exit 50

docker/scripts/prepare.sh (+7, -1)

@@ -5,6 +5,7 @@ ONE_WEEK_IN_MINUTES=10080
echo " ---> [apk] Syncing repos (allowed to fail)..."
apk update --cache-max-age="${ONE_WEEK_IN_MINUTES}" --verbose


echo " ---> [apk] Fetching baseline packages..."
apk add --verbose \
bash \
@@ -12,13 +13,18 @@ apk add --verbose \
coreutils \
tzdata \
curl \
sqlite-libs \
inotify-tools \
imagemagick \
nodejs=8.14.0-r0 \
npm=8.14.0-r0 \
|| exit 20

apk add --verbose \
--repository="http://dl-cdn.alpinelinux.org/alpine/edge/main" \
sqlite-libs \
|| exit 21


echo " ---> [apk] Fetching development packages..."
apk add --virtual=build --verbose \
gcc \


lib/application.ex (+1, -3)

@@ -8,9 +8,9 @@ defmodule Koype.Application do
load_runtime_config()

children = [
supervisor(Koype.Repo, []),
Koype.Cache.get_supervisor(),
worker(Guardian.DB.Token.SweeperServer, []),
supervisor(Koype.Repo, []),
supervisor(Koype.Web.Endpoint, [])
]

@@ -25,10 +25,8 @@ defmodule Koype.Application do
[
:koype,
:logger,
:sentry,
:phoenix,
:cowboy,
:sse,
:arc,
:ex_aws
]


lib/cache.ex (+1, -1)

@@ -73,7 +73,7 @@ defmodule Koype.Cache do
{:ok, value}

{:error, error} ->
Logger.warn("Failed to save #{key} to cache: #{error}.")
Logger.warn("Failed to save #{key} to cache: #{inspect(error)}.")
{:error, error}
end
end


lib/format.ex (+18, -0)

@@ -0,0 +1,18 @@
defmodule Koype.Format do
@moduledoc """
Helper module to convert a struct into a social format.

Provides a set of wrapper methods to the underlying structures
used for the social media format in question.

Targeting:
- Microformats2
- JsonFeed
- ActivityStreams 2
"""

def to(format, data)
# def to(:mf2_json, data), do: Koype.Format.MF2.generate(data)
# def to(:jf2, data), do: Koype.Format.JF2.generate(data)
# def to(:jsonfeed, data), do: Koype.Format.JF2.generate(data)
end
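The commented-out clauses above suggest how `Koype.Format.to/2` is meant to dispatch once concrete serializers land. A minimal sketch, assuming hypothetical `Koype.Format.MF2` and `Koype.Format.JF2` modules (neither exists in this commit):

```elixir
# Hypothetical sketch only: Koype.Format.MF2 / Koype.Format.JF2 are assumed
# modules mirroring the commented-out clauses in Koype.Format above.
defmodule Koype.Format.Sketch do
  def to(:mf2_json, data), do: Koype.Format.MF2.generate(data)
  def to(:jf2, data), do: Koype.Format.JF2.generate(data)
  def to(:jsonfeed, data), do: Koype.Format.JF2.generate(data)
  def to(format, _data), do: {:error, {:unsupported_format, format}}
end
```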

lib/http.ex (+33, -23)

@@ -13,40 +13,47 @@ defmodule Koype.Http do
@timeout 30_000

defmodule Response do
@moduledoc "Represents a HTTP response."
@enforce_keys [:code]
defstruct ~w(code body headers raw)a
@type t :: %Response{code: Integer.t(), body: Map.t(), headers: Map.t(), raw: any()}
end

defmodule Error do
@moduledoc "Represents a HTTP error."
@enforce_keys [:reason]
defstruct ~w(reason raw)a
@type t :: %Error{reason: any(), raw: any()}
end

for method <- ~w(get post put patch delete head options)a do
@doc "Helper method to dispatch a #{method} request."
@spec unquote(method)(url :: String.t(), args :: Keyword.t()) :: {:ok, Response.t()} | {:error, Error.t()}
def unquote(method)(url, args \\ []) do
request(unquote(method), url, args)
end
end

@spec request(method :: Atom.t(), url :: String.t(), args :: Keyword.t()) ::
{:ok, Response.t()} | {:error, Error.t()}
@doc "Dispatches a request over the network."
@spec request(method :: Atom.t(), url :: String.t(), args :: Keyword.t()) :: {:ok, Response.t()} | {:error, Error.t()}
def request(method, url, args \\ []) do
[
&do_request_poison/1,
&do_request_potion/1
&do_request_potion/1,
&do_request_poison/1
]
|> Enum.reduce_while({:error, :unspecified_network_error}, fn handler, _acc ->
Logger.info("Sending request #{url} via method #{method}.")
|> Enum.reduce_while({:error, :unspecified_network_error}, fn handler, acc ->
Logger.info("Sending request to #{url} via method #{method}")
resp = handler.([url: url, method: method] ++ args)

case resp do
{:ok, resp} ->
{:halt, {:ok, resp}}

{:error, err} ->
{:cont, {:error, err}}
{:error, resp} ->
{:halt, {:error, resp}}

:fatal ->
{:cont, acc}
end
end)
end
@@ -56,8 +63,6 @@ defmodule Koype.Http do

options = [
timeout: @timeout,
body: %{},
query: %{},
follow_redirects: true,
auto_sni: true,
headers: headers
@@ -71,13 +76,10 @@ defmodule Koype.Http do
HTTPotion.request(
final_args[:method],
final_args[:url],
body: final_args[:body],
headers: final_args[:headers],
query: final_args[:query],
follow_redirects: final_args[:follow_redirects],
timeout: final_args[:timeout],
basic_auth: final_args[:basic_auth],
auto_sni: final_args[:auto_sni]
Keyword.take(
final_args,
~w(body headers query follow_redirects timeout basic_auth auto_sni)a
)
)

case result do
@@ -85,12 +87,16 @@ defmodule Koype.Http do
{:ok, %Koype.Http.Response{code: code, body: body, headers: resp.headers, raw: resp}}

%HTTPotion.ErrorResponse{message: reason} = resp ->
{:error, %Koype.Http.Error{reason: reason, raw: resp}}
{:error, %Koype.Http.Error{reason: reason}}
end
rescue
err ->
Logger.error("Failed to handle #{final_args[:url]} with HTTPotion: #{inspect(err)}")
:fatal
catch
err ->
Logger.info("Failed to handle #{final_args[:url]} with HTTPoison: #{IO.inspect(err)}")
{:error, err}
Logger.error("Failed to handle #{final_args[:url]} with HTTPotion: #{err}")
:fatal
end
end

@@ -115,12 +121,16 @@ defmodule Koype.Http do
{:ok, %Koype.Http.Response{code: code, body: body, headers: resp.headers, raw: resp}}

{:error, %HTTPoison.Error{reason: reason} = resp} ->
{:error, %Koype.Http.Error{reason: reason, raw: resp}}
{:error, %Koype.Http.Error{reason: reason}}
end
rescue
err ->
Logger.error("Failed to handle #{final_args[:url]} with HTTPoison: #{inspect(err)}")
:fatal
catch
err ->
Logger.info("Failed to handle #{final_args[:url]} with HTTPoison: #{IO.inspect(err)}")
{:error, err}
Logger.error("Failed to handle #{final_args[:url]} with HTTPoison: #{err}")
:fatal
end
end
end
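A usage sketch (not part of the diff) showing the result shapes the per-method helpers return; the URL is illustrative:

```elixir
# Each generated helper (get/2, post/2, ...) wraps request/3 and returns a
# tagged tuple containing either a Response or an Error struct.
case Koype.Http.get("https://example.com") do
  {:ok, %Koype.Http.Response{code: code}} ->
    IO.puts("Fetched with status #{code}")

  {:error, %Koype.Http.Error{reason: reason}} ->
    IO.puts("Request failed: #{inspect(reason)}")
end
```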

lib/indieweb/app.ex (+4, -5)

@@ -24,15 +24,14 @@ defmodule IndieWeb.App do
[
## Recommended parsers to stop at
IndieWeb.App.HxApp,
IndieWeb.App.MF2,

## Open stopgap solutions
IndieWeb.App.WebManifest,
IndieWeb.App.Link,
IndieWeb.App.Link
# IndieWeb.App.WebManifest,

## Corporate-backed standards
IndieWeb.App.OpenGraph,
IndieWeb.App.SchemaOrg
# IndieWeb.App.OpenGraph,
# IndieWeb.App.SchemaOrg

## Experiments
# IndieWeb.App.IndieStore


lib/indieweb/app/h_x_app.ex (+12, -15)

@@ -1,14 +1,19 @@
defmodule IndieWeb.App.HxApp do
@moduledoc """
Provides baseline information about an application from the
Microformats2 information it provides.
Parses h-x-app information from a Website.

It uses the specification of h-x-app information outlined at
https://indieweb.org/h-x-app. This allows platforms that are
richly formatted with said markup to be presented to Koype
in a useful manner.

NOTE: Cache this information to database for usage tracking.
"""
@behaviour IndieWeb.App.Parser

alias IndieWeb.MF2
require Logger

# TODO: Remove x-app usage here.
defp do_format(data) do
result =
Enum.reduce_while(
@@ -45,19 +50,11 @@ defmodule IndieWeb.App.HxApp do
defp do_fetch(url) do
case MF2.Remote.fetch(url) do
{:error, error} ->
Logger.warn("Failed to resolve MF2 data for #{url}: #{error}")
false

{:ok, %{items: nil}} ->
Logger.info("Can't find any items for #{url}.")
false

{:ok, %{items: []}} ->
Logger.info("No items for #{url} were available.")
Logger.warn("Failed to resolve MF2 data for #{url} to use for h-x-app: #{error}")
false

{:ok, mf2_data} ->
Logger.info("Obtained MF2 data for #{url}")
Logger.info("Obtained MF2 data for #{url} to use for h-x-app.")
{:ok, mf2_data}
end
end
@@ -70,6 +67,6 @@ defmodule IndieWeb.App.HxApp do
end
end

@impl false
def clear(_uri), do: :ok
@impl true
def clear(uri), do: IndieWeb.MF2.Remote.flush(uri)
end

lib/indieweb/app/link.ex (+20, -31)

@@ -1,5 +1,14 @@
defmodule IndieWeb.App.Link do
@behaviour IndieWeb.App.Parser
@moduledoc """
Extracts information about app from <link> info.

This takes information stored in the <link rel> bits of the site and uses
it to render generic information about the application. It's not the best
solution.

FIXME: Decorate with <meta> information.
"""

alias IndieWeb.MF2
require Logger
@@ -7,46 +16,26 @@ defmodule IndieWeb.App.Link do
defp do_fetch(url) do
case MF2.Remote.fetch(url) do
{:error, error} ->
Logger.warn("Failed to resolve <link> data for #{url}: #{error}")
false

{:ok, %{rels: nil}} ->
Logger.info("Can't find any <link rel> info for #{url}.")
false

{:ok, %{rels: []}} ->
Logger.info("No <link rel> for #{url} were available.")
Logger.warn("Failed to resolve <link rel=> data for #{url}: #{error}")
false

{:ok, mf2_data} ->
Logger.info("Obtained <link rel> data for #{url}.")
{:ok, mf2_data[:rels] |> Map.put(:url, url)}
{:ok, %{"rels" => rels}} when is_map(rels) ->
Logger.info("Obtained <link rel=> data for #{url}.")
{:ok, Map.put(rels, "url", url)}
end
end

defp do_format(%{icon: icon, url: url} = rels) when is_list(icon) do
icon = rels[:icon] |> List.first()
uri = URI.parse(url)

defp do_format(%{"icon" => icon, "url" => url} = rels) when is_list(icon) do
app_data = %{
logo: icon,
url: uri |> URI.to_string(),
name: uri.host
logo: List.first(icon),
name: URI.parse(url).host,
url: url
}

{:ok, app_data}
end

defp do_format(%{icon: nil} = _rels) do
{:error, :icon_missing_for_link}
end

defp do_format(data) do
cond do
!Map.has_key?(data, :icon) -> {:error, :no_icon_for_data}
true -> {:error, :unknown_error}
end
end
defp do_format(_), do: {:error, :no_useful_link_data}

@impl true
def resolve(uri) do
@@ -56,6 +45,6 @@ defmodule IndieWeb.App.Link do
end
end

@impl false
def clear(_), do: :ok
@impl true
def clear(uri), do: IndieWeb.MF2.Remote.flush(uri)
end
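For reference, a sketch of the transformation the `do_format/1` clause above performs; the rel map below is made up, but its shape matches what `MF2.Remote.fetch/1` hands over:

```elixir
# Illustrative input: string-keyed rels with "url" injected by do_fetch/1.
rels = %{
  "icon" => ["https://example.com/icon.png"],
  "url" => "https://example.com/"
}

# do_format/1 reduces that to generic app data:
# {:ok, %{logo: "https://example.com/icon.png", name: "example.com", url: "https://example.com/"}}
```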

lib/indieweb/app/mf2.ex (+0, -60)

@@ -1,60 +0,0 @@
defmodule IndieWeb.App.MF2 do
@behaviour IndieWeb.App.Parser
@moduledoc """
Provides baseline information about an application from the
Microformats2 information it provides.
"""
alias IndieWeb.MF2
require Logger

# TODO: Remove x-app usage here.
defp do_format(data) do
case MF2.get_format(data, :"x-app") do
{:ok, mf2_data} ->
app_data =
~w(name logo url)a
|> Enum.map(fn key ->
{key, mf2_data |> MF2.get_value!(key) |> List.first()}
end)
|> Map.new()

{:ok, app_data}

{:error, _error} ->
{:error, :no_app_info}
end
end

defp do_fetch(url) do
case MF2.Remote.fetch(url) do
{:error, error} ->
Logger.warn("Failed to resolve MF2 data for #{url}: #{error}")
false

{:ok, %{items: nil}} ->
Logger.info("Can't find any items for #{url}.")
false

{:ok, %{items: []}} ->
Logger.info("No items for #{url} were available.")
false

{:ok, mf2_data} ->
Logger.info("Obtained MF2 data for #{url}")
{:ok, mf2_data}
end
end

@behaviour IndieWeb.App.Parser

@impl true
def resolve(uri) do
case do_fetch(uri) do
{:ok, mf2_data} -> do_format(mf2_data)
false -> {:error, :failed_to_fetch_mf2_data}
end
end

@impl false
def clear(_uri), do: :ok
end

lib/indieweb/app/opengraph.ex (+3, -4)

@@ -1,11 +1,10 @@
defmodule IndieWeb.App.OpenGraph do
@moduledoc false
@behaviour IndieWeb.App.Parser

@impl true
def resolve(_url) do
{:error, :not_implemented}
end
def resolve(_url), do: {:error, :not_implemented}

@impl false
@impl true
def clear(_uri), do: :ok
end

lib/indieweb/app/schemaorg.ex (+3, -2)

@@ -1,9 +1,10 @@
defmodule IndieWeb.App.SchemaOrg do
@moduledoc false
@behaviour IndieWeb.App.Parser

@impl true
def resolve(_), do: :ok
def resolve(_), do: {:error, :not_implemented}

@impl false
@impl true
def clear(_), do: :ok
end

lib/indieweb/app/webmanifest.ex (+3, -2)

@@ -1,9 +1,10 @@
defmodule IndieWeb.App.WebManifest do
@moduledoc false
@behaviour IndieWeb.App.Parser

@impl true
def resolve(url), do: :ok
def resolve(_), do: {:error, :not_implemented}

@impl false
@impl true
def clear(_), do: :ok
end
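OpenGraph, SchemaOrg, and WebManifest all stub out the same `IndieWeb.App.Parser` behaviour that `HxApp` and `Link` implement. A minimal sketch of a parser, with callback signatures inferred from those modules (the behaviour's own specs aren't shown in this diff):

```elixir
# Hypothetical parser: resolve/1 returns {:ok, app_data} or {:error, reason},
# and clear/1 flushes any cached data for the URI.
defmodule IndieWeb.App.NullParser do
  @moduledoc false
  @behaviour IndieWeb.App.Parser

  @impl true
  def resolve(_uri), do: {:error, :not_implemented}

  @impl true
  def clear(_uri), do: :ok
end
```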

lib/indieweb/auth/scope.ex (+1, -1)

@@ -116,5 +116,5 @@ defmodule IndieWeb.Auth.Scope do
})
end

def can_upload?(scope), do: Enum.member?(scope, "media")
def can_upload?(scope) when is_list(scope), do: Enum.member?(scope, "media")
end

lib/indieweb/jf2.ex (+0, -130)

@@ -1,130 +0,0 @@
defmodule IndieWeb.JF2 do
@moduledoc false
alias Koype.Repo.Entry
alias Koype.Storage.ObjectData
alias IndieWeb.Post

# FIXME: Handle the case of a singular value
defp do_extract_categories(jf2, %{category: categories}) when is_list(categories) do
Map.put(
jf2,
:category,
Enum.map(categories, fn c ->
{:ok, obj} = Jason.decode(c)
obj["name"]
end)
)
end

defp do_extract_categories(jf2, _), do: jf2

defp do_extract_content(jf2, object_data) do
content = object_data[:content]

cond do
is_map(content) || is_binary(content) ->
Map.put(jf2, :content, content)

is_list(content) && length(content) == 1 ->
Map.put(jf2, :content, List.first(content))

is_binary(content) ->
Map.put(jf2, :content, content)

true ->
jf2
end
end

defp do_extract_post_type_properties(jf2, object_data) do
with(
{:ok, types} <- Post.determine_type(object_data),
type <- Post.determine_dominant_type(types, object_data)
) do
jf2
|> (fn jf2 -> Map.put(jf2, :name, Post.determine_title(type, object_data)) end).()
|> (fn jf2 ->
case type do
:like ->
Map.put(
jf2,
:"like-of",
Post.get_properties_for_type(type, object_data)
)

:reply ->
Map.put(
jf2,
:"in-reply-to",
Post.get_properties_for_type(type, object_data)
)

:bookmark ->
Map.put(
jf2,
:"bookmark-of",
Post.get_properties_for_type(type, object_data)
)

:repost ->
Map.put(
jf2,
:"repost-of",
Post.get_properties_for_type(type, object_data)
)

:photo ->
photo = Post.get_properties_for_type(type, object_data)

handle_photo = fn photo ->
Map.get(photo, :sizes) |> Enum.map(fn {_, photo} -> %{url: photo[:uri]} end)
end

Map.put(
jf2,
:photo,
cond do
is_list(photo) -> Enum.map(photo, fn p -> handle_photo.(p) end)
is_map(photo) -> handle_photo.(photo)
true -> true
end
)

:event ->
Map.merge(
jf2,
Map.take(object_data, ~w(location start end duration)a)
)

:article ->
Map.merge(
jf2,
Map.take(object_data, ~w(name summary)a)
)

_ ->
jf2
end
end).()
end
end

@doc false
def from_entry(%Entry{} = entry) do
{:ok, object_data} = ObjectData.fetch(entry)
props = object_data[:properties]

base_attrs = %{
"@context": "http://www.w3.org/ns/jf2",
type: entry.type,
url: Entry.get_uri(entry),
published: props[:published]
}

base_attrs
|> do_extract_post_type_properties(props)
|> do_extract_content(props)
|> do_extract_categories(props)
|> ObjectData.flatten_values()
end
end

lib/indieweb/mf2.ex (+1, -7)

@@ -18,13 +18,7 @@ defmodule IndieWeb.MF2 do

def get_value!(_mf2, _property), do: {:error, :no_properties}

def parse(data), do: parse(data, Koype.host())

def parse(data, base_uri) do
def parse(data, base_uri \\ Koype.host()) do
Microformats2.parse(data, base_uri)
end

def from_entry(%Koype.Repo.Entry{type: "note"} = entry) do
Koype.Storage.ObjectData.fetch(entry)
end
end
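With the default argument, both call forms below delegate to `Microformats2.parse/2`; the HTML and base URI are illustrative:

```elixir
html = ~s(<div class="h-entry"><p class="p-name">Hello</p></div>)

IndieWeb.MF2.parse(html)                         # base URI defaults to Koype.host()
IndieWeb.MF2.parse(html, "https://example.com/") # or pass an explicit base URI
```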

lib/indieweb/mf2/remote.ex (+12, -14)

@@ -9,36 +9,34 @@ defmodule IndieWeb.MF2.Remote do
alias Koype.Http
require Logger

defp do_normalize(json), do: Jason.decode(json, keys: :atoms)
defp do_normalize(json), do: Jason.decode(json, keys: :strings, strings: :copy)
defp do_serialize(mf2), do: Jason.encode_to_iodata(mf2)
defp key_func(uri), do: :crypto.hash(:sha256, uri) |> Base.encode16()

defp fetch_mf2(uri) do
%{scheme: base_scheme, host: base_host} = URI.parse(uri)
base_uri = %URI{scheme: base_scheme, host: base_host}
Logger.debug("Attempting to fetch #{uri}...")
Logger.debug(fn -> "Attempting to fetch #{uri}..." end)

with(
{:ok, %Http.Response{code: 200, body: body}} <- Http.get(uri),
mf2 when is_map(mf2) <- IndieWeb.MF2.parse(body, base_uri),
true <- Map.has_key?(mf2, :items)
{:ok, %Http.Response{code: code, body: body}} when (code >= 200 and code < 300) or code == 410 <- Http.get(uri),
mf2 when is_map(mf2) <- IndieWeb.MF2.parse(body, base_uri)
) do
Logger.debug("Obtained valid response from #{uri}; serializing...")
Logger.debug(fn -> "Obtained valid response from #{uri}; serializing..." end)
mf2 |> do_serialize
else
{:error, %Http.Error{reason: reason}} ->
Logger.debug("Failed to fetch #{uri}: #{reason}")
{:error, reason}

{:error, error} ->
Logger.debug("Failed to fetch #{uri} for unrecognized error: #{error}")
{:error, :unexpected_error}
Logger.error("Failed to fetch #{uri}: #{inspect(error)}")
{:error, error}

:error ->
{:error, :mf2_parsing_failure}

false ->
{:error, :no_mf2_data_found}

{:ok, %Http.Response{}} ->
{:error, :no_mf2_data_found}
end
end

@@ -46,8 +44,8 @@ defmodule IndieWeb.MF2.Remote do
Logger.info("Fetching #{uri}...")

case Cache.fetch_for(uri, &key_func/1) do
{:error, _} ->
Logger.info("#{uri} not found in cache; refreshing.")
{:error, error} ->
Logger.info("#{uri} not found in cache due to error #{error}; refreshing.")
refresh(uri)

{:ok, nil} ->


lib/indieweb/micropub.ex (+7, -5)

@@ -1,7 +1,7 @@
defmodule IndieWeb.Micropub do
@actionable_keys ~w(replace add delete undelete)
@actionable_keys ~w(replace add delete)
@reserved_mp_param_keys ~w(h action url content) ++ @actionable_keys
@sensitive_properties [:access_token, "access_token"]
@sensitive_properties ~w(access_token)
@micropub_prefix "mp-"

def reserved_keyword?(key) do
@@ -13,18 +13,20 @@ defmodule IndieWeb.Micropub do
end

@doc "Gets the sub-map where the keys are known to be actionable for update operations."
@spec actionable(params :: Map.t()) :: Map.t()
@spec actionable(params :: map()) :: map()
def actionable(params), do: Map.take(params, @actionable_keys)

@spec scrub_properties(properties :: map()) :: map()
def scrub_properties(properties), do: Map.drop(properties, @sensitive_properties)

@spec extract_params(properties :: map()) :: {map(), map()}
def extract_params(properties) do
scrubbed_properties = properties |> scrub_properties
reserved_keys = scrubbed_properties |> Map.keys() |> Enum.filter(&reserved_keyword?/1)

reserved_params = scrubbed_properties |> Map.take(reserved_keys) |> Map.new()
entry_params = scrubbed_properties |> Map.drop(reserved_keys) |> Map.new()
params = scrubbed_properties |> Map.drop(reserved_keys) |> Map.new()

{entry_params, reserved_params}
{params, reserved_params}
end
end
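A sketch of how `extract_params/1` splits a Micropub request, assuming `reserved_keyword?/1` (cut off above) matches the keys in `@reserved_mp_param_keys`; the property map is illustrative:

```elixir
properties = %{
  "h" => "entry",
  "content" => "Hello, world!",
  "category" => ["indieweb"],
  "access_token" => "secret"
}

# The access token is scrubbed first; "h" and "content" are reserved keys, so
# the split should come out roughly as:
# {%{"category" => ["indieweb"]}, %{"h" => "entry", "content" => "Hello, world!"}}
{params, reserved_params} = IndieWeb.Micropub.extract_params(properties)
```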

lib/indieweb/micropub/content.ex (+5, -12)

@@ -1,7 +1,6 @@
defmodule IndieWeb.Micropub.Content do
@doc "Handles the prescribed action for the content with the provided arguments."
@callback invoke(action :: String.t(), arguments :: Keyword.t()) ::
:ok | {:ok, any()} | {:error, any()}
@callback invoke(action :: String.t(), arguments :: Keyword.t()) :: :ok | {:ok, any()} | {:error, any()}

@doc "Determines if the content action is supported."
@callback supports?(action :: String.t()) :: true | false
@@ -31,6 +30,8 @@ defmodule IndieWeb.Micropub.Content do
@spec process_property(name :: String.t(), value: any()) :: {:error, any()} | {:ok, any()}
def process_property(name, value)

# NOTE: Add support for detecting venue information.
# NOTE: Add support for piping information to atlas.p3k.io
def process_property("location", value) when is_binary(value) do
cond do
String.starts_with?(value, "geo:") ->
@@ -50,11 +51,6 @@ defmodule IndieWeb.Micropub.Content do
def process_property("category", categories) when is_list(categories) do
Enum.reduce_while(categories, {:ok, []}, fn category, {:ok, results} ->
case process_property("category", category) do
{:error, :category_exists} ->
{:ok, record} = Koype.Repo.Category.fetch(category)
{:ok, encoded_record} = Jason.encode(record)
{:cont, {:ok, results ++ [encoded_record]}}

{:error, _} = error ->
{:halt, error}

@@ -65,15 +61,12 @@ defmodule IndieWeb.Micropub.Content do
end

def process_property("category", category) when is_binary(category) do
case Koype.Repo.Category.create(%{name: category}) do
{:ok, record} -> Jason.encode(record)
{:error, _error} = err -> err
end
Koype.Repo.fetch_or_create(Koype.Repo.Category, category, %{name: category})
end

def process_property(_name, value), do: {:ok, value}

@doc "Takes and transforms values of a map inline"
@doc "Takes and transforms values of a map inline."
@spec process_properties(props :: Map.t()) :: Map.t() | {:error, any()}
def process_properties(props) do
Enum.reduce_while(props, props, fn {key, value}, acc ->


lib/indieweb/micropub/entry.ex (+229, -143)

@@ -1,133 +1,224 @@
defmodule IndieWeb.Micropub.Entry do
@behaviour IndieWeb.Micropub.Content
@uploadable_keys ~w(photo video)
@uploadable_actions ~w(replace add update)
@uploadable_actions ~w(replace add)

alias IndieWeb.Auth.Scope
alias IndieWeb.Micropub
alias IndieWeb.Micropub.Content
alias Koype.Repo
alias Koype.Repo.Entry
alias Koype.Storage.ObjectData
alias Koype.Repo.Entry, as: Model

require Logger

# NOTE: Find all referenced in this entry.
# NOTE: Queue a job to send all of the mentions.
defp do_send_webmentions(entry) do
def send_webmentions(_) do
:ok
end

defp can_action_trigger_upload?(key),
do: Enum.member?(@uploadable_actions, key)

defp will_data_trigger_upload?(params),
do: Enum.any?(@uploadable_keys, fn key -> Map.has_key?(params, key) end)
defp can_action_trigger_upload?(key)
defp can_action_trigger_upload?(key), do: Enum.member?(@uploadable_actions, key)

defp do_update(action, entry, entry_data, data)
defp do_merge_value(properties, key, new_value)

defp do_update("replace", entry, _entry_data, data) do
{:ok, _entry} = Entry.update(entry, data)
:ok
defp do_merge_value(properties, key, new_value) when is_binary(new_value) do
do_merge_value(properties, key, [new_value])
end

defp do_update("add", entry, entry_data, data) do
added_properties = Map.keys(data)
defp do_merge_value(properties, key, new_value) when is_list(new_value) do
current_value = Map.get(properties, key, [])

new_entry_data =
Enum.map(entry_data, fn {key, value} ->
if Enum.member?(added_properties, key) do
cond do
is_binary(value) -> {key, [value] ++ data[key]}
is_list(value) -> {key, value ++ data[key]}
true -> {key, value}
end
cond do
key == "content" ->
if Map.has_key?(properties["content"], "html") do
Map.put(properties, "content", %{
"html" => current_value["html"] ++ new_value,
"value" => current_value["value"] ++ new_value
})
else
{key, value}
Map.put(properties, "content", %{"value" => current_value["value"] ++ new_value})
end
end)
|> Map.new()

{:ok, _entry} = Entry.update(entry, new_entry_data)
:ok
true ->
Map.put(properties, key, current_value ++ new_value)
end
end

defp do_update("delete", entry, entry_data, data) when is_list(data) do
new_entry_data = Map.drop(entry_data, data)
{:ok, _entry} = Entry.update(entry, new_entry_data)
:ok
defp do_drop_value(properties, "content", value) when is_binary(value) do
do_drop_value(properties, "content", [value])
end

defp do_update("delete", entry, entry_data, data) when is_map(data) do
deleted_properties = Map.keys(data)
defp do_drop_value(properties, "content", value) when is_map(value) do
Map.put(properties, "content", %{"value" => ""})
end

defp do_drop_value(properties, "content", value) when is_list(value) do
existing_values = Map.get(properties["content"], "value", [])

new_entry_data =
Enum.map(entry_data, fn {key, values} ->
if Enum.member?(deleted_properties, key) do
trimmed_props = Enum.reject(values, fn value -> Enum.member?(data[key], value) end)
Map.put(
properties,
"content",
Map.put(properties["content"], "value", existing_values -- value)
)
end

{key, trimmed_props}
defp do_drop_value(properties, key, value) when is_binary(value) do
do_drop_value(properties, key, [value])
end

defp do_drop_value(properties, key, value) when is_list(value) do
existing_values = Map.get(properties, key, [])
Map.put(properties, key, existing_values -- value)
end

defp do_enact_update_action(action, model, properties, action_data)

defp do_enact_update_action("replace", model, properties, action_data) do
result =
Enum.reduce_while(action_data, properties, fn {key, value}, updated_properties ->
if Koype.Storage.does_property_need_upload?(key) do
with(
:ok <-
Koype.Storage.destroy_property_for_record(
{key, Map.get(updated_properties, key, [])},
model
),
{^key, uploaded_values} <- Koype.Storage.upload_property_for_record({key, action_data[key]}, model)
) do
{:cont, {:ok, Map.put(updated_properties, key, uploaded_values)}}
else
{:error, error} -> {:halt, {:error, key: key, error: error}}
end
else
{key, values}
{:cont, {:ok, Map.put(updated_properties, key, value)}}
end
end)
|> Map.new()

{:ok, _entry} = Entry.update(entry, new_entry_data)
:ok
case result do
{:ok, properties} -> Model.update(model, properties)
_ -> result
end
end

# TODO: Handle rollback cleanly if tx fails
defp do_update_entry(entry, scope: scope, params: params) do
{:ok, entry_data} = ObjectData.fetch(entry)
defp do_enact_update_action("add", model, properties, action_data) when is_map(action_data) do
result =
Enum.reduce_while(action_data, {:ok, properties}, fn {key, value}, {:ok, updated_properties} ->
if Koype.Storage.does_property_need_upload?(key) do
case Koype.Storage.upload_property_for_record({key, value}, model) do
{:ok, {^key, uploaded_values}} ->
{:cont, {:ok, do_merge_value(updated_properties, key, uploaded_values)}}

{:error, error} ->
{:halt, {:error, key: key, error: error}}
end
else
{:cont, {:ok, do_merge_value(updated_properties, key, value)}}
end
end)

actionables = Micropub.actionable(params)
actions = Map.keys(actionables)
case result do
{:ok, properties} -> Model.update(model, properties)
_ -> result
end
end

defp do_enact_update_action("delete", model, properties, action_data) when is_map(action_data) do
result =
Repo.transaction(fn ->
Enum.each(actions, fn action ->
data = actionables[action]

if can_action_trigger_upload?(action) && will_data_trigger_upload?(data) do
if Scope.can_upload?(scope) do
do_update(action, entry, entry_data, data)
else
Repo.rollback(:scope_missing)
end
else
do_update(action, entry, entry_data, data)
Enum.reduce_while(action_data, {:ok, properties}, fn {key, value}, {:ok, updated_properties} ->
if Koype.Storage.does_property_need_upload?(key) do
case Koype.Storage.destroy_property_for_record({key, value}, model) do
:ok -> {:cont, {:ok, do_drop_value(updated_properties, key, value)}}
{:error, error} -> {:halt, {:error, key: key, error: error}}
end
end)
else
{:cont, {:ok, do_drop_value(updated_properties, key, value)}}
end
end)

case result do
{:ok, :ok} -> :ok
{:ok, properties} -> Model.update(model, properties)
_ -> result
end
end

defp do_assoc_category_to_entry(entry, categories)
defp do_assoc_category_to_entry(entry, []), do: {:ok, entry}
defp do_enact_update_action("delete", model, properties, action_data) when is_list(action_data) do
result =
Enum.reduce_while(action_data, {:ok, model, properties}, fn key, {:ok, new_model, new_properties} ->
current_value = new_properties[key]

defp do_assoc_category_to_entry(entry, category) when is_binary(category) do
{:ok, json_category} = Jason.decode(category)
{:ok, record} = Koype.Repo.Category.fetch(json_category["id"])
with(
{:ok, refreshed_model} <-
do_enact_update_action("delete", new_model, new_properties, %{key => current_value}),
{:ok, refreshed_properties} <- Model.Json.find(refreshed_model)
) do
{:cont, {:ok, refreshed_model, refreshed_properties}}
else
{:error, _} = err -> {:halt, err}
end
end)

preloaded_entry = Koype.Repo.preload(entry, :categories)
case result do
{:ok, refreshed_model, _} -> {:ok, refreshed_model}
_ -> result
end
end

preloaded_entry
# TODO: Handle rollback cleanly if tx fails
defp do_update_entry(model, scope: scope, params: params) do
case Model.Json.find(model) do
{:ok, properties} ->
actionable_data = Micropub.actionable(params)

Repo.transaction(fn ->
Enum.reduce_while(Map.keys(actionable_data), nil, fn action, _ ->
action_data = actionable_data[action]

if can_action_trigger_upload?(action) and Koype.Storage.will_trigger_upload?(action_data) and
!Scope.can_upload?(scope) do
Logger.info(fn -> "Action '#{action}' required permissions to upload." end)
Repo.rollback(:scope_missing)
else
Logger.info(fn ->
"Invoking action #{action} on #{model.id} with #{inspect(action_data)}"
end)

case do_enact_update_action(action, model, properties, action_data) do
{:ok, model} -> {:cont, model}
{:error, error} -> Repo.rollback(error)
end
end
end)
end)

{:error, _} = error ->
error
end
end

defp do_assoc_category_to_entry(model, categories)
defp do_assoc_category_to_entry(model, []), do: {:ok, model}

defp do_assoc_category_to_entry(model, category) when is_binary(category) do
case Koype.Repo.fetch_or_create(Koype.Repo.Category, category, %{name: category}) do
{:ok, record} -> do_assoc_category_to_entry(model, record)
{:error, _} = error -> error
end
end

defp do_assoc_category_to_entry(model, %Koype.Repo.Category{} = record) do
preloaded_model = Koype.Repo.preload(model, :categories)
new_category_list = [record] ++ preloaded_model.categories

preloaded_model
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_assoc(
:categories,
[record] ++ preloaded_entry.categories
)
|> Ecto.Changeset.put_assoc(:categories, new_category_list)
|> Koype.Repo.update()
end

@spec do_assoc_category_to_entry(entry :: Entry.t(), categories :: List.t()) ::
{:ok, Entry.t()} | {:error, any()}
defp do_assoc_category_to_entry(entry, categories) when is_list(categories) do
Enum.reduce_while(categories, {:ok, entry}, fn category, {:ok, entry} ->
case do_assoc_category_to_entry(entry, category) do
@spec do_assoc_category_to_entry(entry :: Entry.t(), categories :: List.t()) :: {:ok, Entry.t()} | {:error, any()}
defp do_assoc_category_to_entry(model, categories) when is_list(categories) do
Enum.reduce_while(categories, {:error, :failed_assoc_category}, fn category, _ ->
case do_assoc_category_to_entry(model, category) do
{:ok, record} -> {:cont, {:ok, record}}
{:error, cs} -> {:halt, {:error, cs.errors}}
end
@@ -136,24 +227,52 @@ defmodule IndieWeb.Micropub.Entry do

def invoke(action, arguments)

@impl true
def invoke("update", scope: scope, params: [reserved: params, content: _]) do
if Enum.member?(scope, "update") do
url = params["url"]

case Model.resolve_from_uri(url) do
nil ->
{:error, :not_found}

{:ok, model} ->
with(
{:ok, refreshed_model} <- do_update_entry(model, scope: scope, params: params),
:ok <- send_webmentions(refreshed_model)
) do
{:ok, state: :updated, model: refreshed_model}
else
{:error, _} = error -> error
end

{:error, _} = error ->
error
end
else
{:error, :scope_missing}
end
end

@impl true
def invoke("create", scope: scope, params: [reserved: rp, content: cp]) do
if Enum.member?(scope, "create") do
full_params = Content.process_properties(Map.merge(rp, cp))
params = Map.merge(rp, cp)

if will_data_trigger_upload?(full_params) && !Scope.can_upload?(scope) do
if Koype.Storage.will_trigger_upload?(params) && !Scope.can_upload?(scope) do
{:error, :scope_missing}
else
categories = Map.get(params, "category", [])

with(
{:ok, entry} <- Entry.create("entry", full_params),
{:ok, entry} <- do_assoc_category_to_entry(entry, Map.get(full_params, "category", [])),
:ok <- do_send_webmentions(entry)
full_params when is_map(full_params) <- Content.process_properties(params),
{:ok, model} <- Model.create(full_params),
{:ok, reloaded_model} <- do_assoc_category_to_entry(model, categories),
:ok <- send_webmentions(model)
) do
{:ok, :created, entry}
{:ok, state: :created, model: reloaded_model}
else
{:error, cs} ->
Logger.warning("Failed to create entry: #{IO.inspect(cs.errors)}")
{:error, :not_created}
{:error, _} = error -> error
end
end
else
@@ -161,49 +280,21 @@ defmodule IndieWeb.Micropub.Entry do
end
end

@impl true
def invoke("delete", scope: scope, params: [reserved: reserved_params, content: _]) do
if Enum.member?(scope, "delete") do
url = Map.get(reserved_params, "url")

result =
case Entry.resolve_from_uri(url) do
nil ->
{:error, :not_found}

{:ok, entry} ->
with(
:ok <- Entry.delete(entry),
:ok <- do_send_webmentions(entry)
) do
:ok
else
{:error, _} = error -> error
end
end

case result do
:ok -> {:ok, :deleted}
{:error, :not_found} -> {:error, :entry_not_found}
end
else
{:error, :scope_missing}
end
end

def invoke("update", scope: scope, params: [reserved: params, content: _]) do
if Enum.member?(scope, "update") do
url = params["url"]

case Entry.resolve_from_uri(url) do
case Model.resolve_from_uri(url) do
nil ->
{:error, :entry_not_found}
{:error, :not_found}

{:ok, entry} ->
with(
:ok <- do_update_entry(entry, scope: scope, params: params),
:ok <- do_send_webmentions(entry)
{:ok, record} <- Model.delete(entry),
:ok <- send_webmentions(entry)
) do
{:ok, :updated, Koype.Repo.get(Koype.Repo.Entry, entry.id)}
{:ok, state: :deleted, model: record}
else
{:error, _} = error -> error
end
@@ -213,29 +304,24 @@ defmodule IndieWeb.Micropub.Entry do
end
end

def invoke("undelete", scope: scope, params: [reserved: reserved_params, content: _]) do
@impl true
def invoke("undelete", scope: scope, params: [reserved: params, content: _]) do
if Enum.member?(scope, "undelete") do
url = Map.get(reserved_params, "url")
url = Map.get(params, "url")

result =
case Entry.resolve_from_uri(url) do
nil ->
{:error, :not_found}

{:ok, entry} ->
with(
:ok <- Entry.undelete(entry),
:ok <- do_send_webmentions(entry)
) do
:ok
else
{:error, _} = error -> error
end
end
case Model.resolve_from_uri(url) do
nil ->
{:error, :not_found}

case result do
{:ok, entry} -> {:ok, :undeleted, entry}
{:error, :not_found} -> {:error, :entry_not_found}
{:ok, model} ->
with(
{:ok, record} <- Model.undelete(model),
:ok <- send_webmentions(record)
) do
{:ok, state: :undeleted, model: record}
else
{:error, _} = error -> error
end
end
else
{:error, :scope_missing_for_undelete}
@@ -243,5 +329,5 @@ defmodule IndieWeb.Micropub.Entry do
end

@impl true
def supports?(action), do: Enum.member?(~w(create update delete undelete post-process), action)
def supports?(action), do: Enum.member?(~w(create update delete undelete), action)
end
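A sketch of the call shape the "create" clause above expects; the scope and property values are made up:

```elixir
scope = ~w(create media)

args = [
  scope: scope,
  params: [
    reserved: %{"h" => "entry"},
    content: %{"content" => "Hello from Micropub", "category" => ["test"]}
  ]
]

# On success the clause returns {:ok, state: :created, model: model};
# a scope without "create" yields {:error, :scope_missing}.
case IndieWeb.Micropub.Entry.invoke("create", args) do
  {:ok, state: :created, model: model} -> {:ok, model}
  {:error, reason} -> {:error, reason}
end
```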

lib/indieweb/post.ex (+171, -178)

@@ -2,7 +2,8 @@ defmodule IndieWeb.Post do
alias Phoenix.HTML.SimplifiedHelpers.Truncate, as: T

@properties_to_kind %{
"location" => :checkin,
"checkin" => :checkin,
"audio" => :audio,
"in-reply-to" => :reply,
"in_reply_to" => :reply,
"like-of" => :like,
@@ -18,8 +19,6 @@ defmodule IndieWeb.Post do
"name" => :article
}

def response_types, do: ~w(reply listen like bookmark repost read follow article note)a

def known_types do
[
%{type: "article", name: "Article", names: "Articles"},
@@ -34,184 +33,73 @@ defmodule IndieWeb.Post do
%{type: "donation", name: "Donation", names: "Donations"},
%{type: "payment", name: "Payment", names: "Payments"},
%{type: "event", name: "Event", names: "Events"},
# %{type: "exercise", name: "Workout", names: "Workouts"},
%{type: "exercise", name: "Workout", names: "Workouts"},
%{type: "follow", name: "Follow", names: "Follows"},
# %{type: "food", name: "Food", names: "Meals"},
%{type: "food", name: "Food", names: "Meals"},
%{type: "gameplay", name: "Game Play", names: "Game Plays"},
# %{type: "issue", name: "Issue", names: "Issues"},
%{type: "issue", name: "Issue", names: "Issues"},
%{type: "photo", name: "Photo", names: "Photos"},
# %{type: "presentation", name: "Presentation", names: "Presentations"},
# %{type: "quotation", name: "Quotation", names: "Quotations"},
%{type: "presentation", name: "Presentation", names: "Presentations"},
%{type: "quotation", name: "Quotation", names: "Quotations"},
%{type: "read", name: "Read", names: "Reads"},
# %{type: "sleep", name: "Sleep", names: "Sleeps"},
# %{type: "venue", name: "Venue", names: "Venues"},
%{type: "sleep", name: "Sleep", names: "Sleeps"},
%{type: "venue", name: "Venue", names: "Venues"},
%{type: "watch", name: "Watch", names: "Watches"},
%{type: "video", name: "Video", names: "Videos"},
%{type: "rsvp", name: "RSVP", names: "RSVPs"}
]
end

def extract_post_types(mf2) do
mf2
|> Map.keys()
|> Enum.map(fn
key when is_atom(key) -> Map.get(@properties_to_kind, key |> Atom.to_string(), nil)
key when is_binary(key) -> Map.get(@properties_to_kind, key, nil)
end)
|> Enum.reject(&is_nil/1)
end

defp detect_note(data) do
cond do
is_binary(data[:content]) && String.length(data[:content]) == 0 ->
false

is_binary(data[:summary]) && String.length(data[:summary]) == 0 ->
false

!is_nil(data[:name]) && is_list(data[:name]) && String.length(List.first(data[:name])) != 0 ->
false

!is_nil(data[:name]) && String.length(data[:name]) != 0 ->
false

true ->
true
end
end

defp detect_article(data) do
name =
if is_binary(Map.get(data, :name, "")) do
String.trim(Map.get(data, :name, ""))
else
Map.get(data, :name)
end

content =
if is_binary(Map.get(data, :content, "")) do
String.trim(Map.get(data, :content, ""))
else
Map.get(data, :content)
end

plain_content =
if is_map(content) do
content[:value]
else
content
end

cond do
is_binary(name) && String.length(name) != 0 ->
true

is_list(name) && String.length(List.first(name)) != 0 ->
true

is_binary(name) && is_binary(content) && !String.starts_with?(content, name) ->
true

is_list(name) && is_list(plain_content) &&
!String.starts_with?(List.first(plain_content), name[0]) ->
true

is_list(name) && is_binary(plain_content) && !String.starts_with?(plain_content, name[0]) ->
true

is_binary(name) && is_list(plain_content) &&
!String.starts_with?(List.first(plain_content), name) ->
true

is_binary(name) && is_binary(plain_content) && !String.starts_with?(plain_content, name) ->
true

true ->
false
end
end

def determine_type(mf2)

def determine_type(%{properties: properties} = _mf2), do: determine_type(properties)

def determine_type(mf2) do
types = extract_post_types(mf2)
def response_types, do: ~w(reply listen like bookmark repost read follow)a

case types do
[] -> {:ok, [:note]}
_ -> {:ok, types}
end
def is_response_type?(type) do
Enum.member?(response_types(), type)
end

def get_properties_for_type(type, %{properties: properties} = _data),
do: get_properties_for_type(type, properties)

def get_properties_for_type(:video, %{video: video} = _data), do: video
def get_properties_for_type(:photo, %{photo: photo} = _data), do: photo
def get_properties_for_type(:like, %{"like-of": uri} = _data), do: uri
def get_properties_for_type(:repost, %{"repost-of": uri} = _data), do: uri

def get_properties_for_type(:reply, %{"in-reply-to": uri, content: _content} = _data),
do: uri

def get_properties_for_type(:bookmark, %{"bookmark-of": bookmark} = _data),
do: bookmark

def get_properties_for_type(:checkin, %{location: location} = _data) when is_binary(location) do
if String.starts_with?(location, "geo:") do
regex = ~r/geo\:c(?<lat>\d+)\,c(?<lng>\d+)/
%{lat: lat, lng: lng} = Regex.named_captures(regex, location)

%{longitude: lng, latitude: lat}
@doc "Determines the types exposed by the provided properties."
@spec determine_type(properties :: Map.t()) :: Atom.t()
def determine_type(properties) when is_map(properties) do
types =
properties
|> Map.keys()
|> Enum.map(fn
key when is_binary(key) -> Map.get(@properties_to_kind, key, nil)
key when is_atom(key) -> Map.get(@properties_to_kind, Atom.to_string(key), nil)
end)
|> Enum.reject(&is_nil/1)

if types == [] do
[:note]
else
location
types
end
end

def get_properties_for_type(:checkin, data), do: Map.take(data, ~w(location location-visibility)a)
@doc "Extract properties specific to a particular post type."
@spec get_properties_for_type(type :: Atom.t(), properties :: Map.t()) :: Map.t()
def get_properties_for_type(type, properties)
def get_properties_for_type(:note, properties), do: get_properties_for_type(:entry, properties)

def get_properties_for_type(:event, data),
do: Map.take(data, ~w(location location-visibility start end duration organizer)a)

def get_properties_for_type(:note, _data), do: %{}
def get_properties_for_type(:article, data), do: Map.take(data, ~w(summary)a)

def get_properties_for_type(:entry, data),
def get_properties_for_type(:entry, properties),
do:
Map.take(data, ~w(name content data category published updated syndication)a)
|> (fn map ->
if is_nil(map[:category]) do
map
else
Map.put(
map,
:category,
Enum.map(map[:category], fn category_json ->
{:ok, category} = Jason.decode(category_json)
{:ok, record} = Koype.Repo.Category.fetch(category["id"])
record
end)
)
end
end).()
Map.take(
properties,
~w(name summary content published updated author category url uid syndication)a
)

def determine_dominant_type(type, properties)

def determine_dominant_type(types, %{properties: properties}),
do: determine_dominant_type(types, properties)

def determine_dominant_type(types, properties) do
cond do
:reply in types ->
:reply

:event in types ->
:event

:rsvp in types ->
[:rsvp, :reply] in types ->
:rsvp

:reply in types ->
:reply

:checkin in types ->
:checkin

@@ -242,46 +130,151 @@ defmodule IndieWeb.Post do
end

def determine_title(type, data)
def determine_title(_, %{name: name} = _data), do: name

def determine_title(:photo, %{name: name} = _data), do: "Photo: #{name}"
def determine_title(:audio, %{"name" => name, "audio" => audio})
when is_list(audio) and length(audio) == 1 and is_binary(name) do
name <> " - One Audio"
end

def determine_title(:audio, %{"name" => name, "audio" => audio})
when is_list(audio) and is_binary(name) do
name <> " - #{length(audio)} Audio"
end

def determine_title(:audio, %{"content" => content, "name" => nil, "audio" => audio})
when is_list(audio) do
Enum.join(
[T.truncate(content["value"]), "-", determine_title(:audio, %{"audio" => audio})],
" "
)
end

def determine_title(:audio, %{"audio" => audio}) when is_list(audio) and length(audio) == 1 do
"One Audio"
end

def determine_title(:audio, %{"audio" => audio}) when is_list(audio) and length(audio) > 1 do
"#{length(audio)} Audio"
end

def determine_title(:photo, %{"name" => name, "photo" => photo})
when is_list(photo) and length(photo) == 1 and is_binary(name) do
name <> " - One Photo"
end

def determine_title(:photo, %{"name" => name, "photo" => photo})
when is_list(photo) and is_binary(name) do
name <> " - #{length(photo)} Photos"
end

def determine_title(:photo, %{"content" => content, "name" => nil, "photo" => photo})
when is_list(photo) do
Enum.join(
[T.truncate(content["value"]), "-", determine_title(:photo, %{"photo" => photo})],
" "
)
end

def determine_title(:photo, %{"photo" => photo}) when is_list(photo) and length(photo) == 1 do
"One Photo"
end

def determine_title(:photo, %{"photo" => photo}) when is_list(photo) and length(photo) > 1 do
"#{length(photo)} Photos"
end

def determine_title(:photo, %{content: content} = _data),
do: "Photo: #{content |> T.truncate()}"
def determine_title(:video, %{"name" => name, "video" => video})
when is_list(video) and length(video) == 1 and is_binary(name) do
name <> " - One Video"
end

def determine_title(:video, %{"name" => name, "video" => video})
when is_list(video) and is_binary(name) do
name <> " - #{length(video)} Videos"
end

def determine_title(:video, %{"content" => content, "video" => video, "name" => nil})
when is_list(video) do
Enum.join(
[T.truncate(content["value"]), "-", determine_title(:video, %{"video" => video})],
" "
)
end

def determine_title(:video, %{"video" => video}) when is_list(video) and length(video) == 1 do
"One Video"
end

def determine_title(:bookmark, %{content: content} = _data),
do: "Bookmarked: #{content |> T.truncate()}"
def determine_title(:video, %{"video" => video}) when is_list(video) and length(video) > 1 do
"#{length(video)} Videos"
end

def determine_title(:bookmark, %{:"bookmark-of" => uri} = _data),
do: "Bookmarked: #{uri |> T.truncate()}"
def determine_title(:note, %{"content" => content}) when is_map(content),
do: T.truncate(content["value"])

def determine_title(:bookmark, %{bookmark: uri} = _data),
do: "Bookmarked: #{uri |> T.truncate()}"
def determine_title(:bookmark, %{"content" => content}) when is_map(content),