:)
commit 1c273e1fc0
46 changed files with 7589 additions and 0 deletions

6  .gitignore  vendored  Normal file
@@ -0,0 +1,6 @@
/target
/data
/.vscode
docker-compose.yaml
booruconfig.toml
.env

3648  Cargo.lock  generated  Normal file
File diff suppressed because it is too large

35  Cargo.toml  Normal file
@@ -0,0 +1,35 @@
[package]
name = "axumbooru"
authors = ["Shiroyashik <shiroyashik@shsr.ru>"]
license = "GPL-3.0-or-later"
version = "0.1.0"
edition = "2021"
publish = false

[workspace]
members = ["migration"]

[dependencies]
dotenvy = "0.15.7"
uuid = { version = "1.8.0", features = ["v4", "fast-rng"] }
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.115"
tokio = { version = "1.37.0", features = ["full"] }
axum = { version = "0.7.5", features = ["json", "query", "tracing", "multipart"] }
toml = "0.8.12"
tower-http = { version = "0.5.2", features = ["fs", "trace"] }
chrono = { version = "0.4.37", features = ["serde"] }
sea-orm = { version = "0.12.15", features = ["sqlx-postgres", "runtime-tokio-rustls", "macros", "with-chrono", "with-uuid", "debug-print"] }
hmac = "0.12.1"
md-5 = "0.10.6"
mime_guess2 = "2.0.5"
thiserror = "1.0.58"
argon2 = "0.5.3"
sha1 = "0.10.6"
log = "0.4.21"
env_logger = "0.11.3"
anyhow = "1.0.82"
# Currently unused
ring = "0.17.8"
dashmap = "5.5.3" # also unused
data-encoding = "2.5.0"

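The dependency list above outlines the stack: `axum` for the HTTP layer, `sea-orm` over Postgres, `tokio` as the async runtime, `dotenvy` for `.env` loading, and `log`/`env_logger` for logging. Below is a rough, illustrative sketch of how these crates typically compose; the `AppState` type, the `/info` route and the bind address (taken from the `listen` default in booruconfig_default.toml) are assumptions for this sketch, not the crate's actual code.

```rust
// Illustrative sketch only: shows how the declared dependencies
// (dotenvy, env_logger, sea-orm, axum, tokio) typically fit together.
// AppState and the /info route are assumptions, not axumbooru's real code.
use std::sync::Arc;

use axum::{extract::State, routing::get, Json, Router};
use sea_orm::{Database, DatabaseConnection};

#[derive(Clone)]
struct AppState {
    db: Arc<DatabaseConnection>,
}

async fn info(State(_state): State<AppState>) -> Json<serde_json::Value> {
    // Trivial handler; real handlers would query the database via state.db.
    Json(serde_json::json!({ "name": "axumbooru" }))
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    dotenvy::dotenv().ok(); // loads DATABASE_URL and RUST_LOG from .env
    env_logger::init();     // honours RUST_LOG

    let db_url = std::env::var("DATABASE_URL")?;
    let db = Database::connect(db_url.as_str()).await?;

    let state = AppState { db: Arc::new(db) };
    let app = Router::new().route("/info", get(info)).with_state(state);

    let listener = tokio::net::TcpListener::bind("127.0.0.1:6667").await?;
    axum::serve(listener, app).await?;
    Ok(())
}
```
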
674  LICENSE.txt  Normal file
@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
[Standard, unmodified text of the GNU General Public License, version 3; see <https://www.gnu.org/licenses/>.]

17  README.md  Normal file
@@ -0,0 +1,17 @@
## Axumbooru
**The project is not finished and is under active development!**

Run the following to prepare the development environment:
```bash
echo -e "DATABASE_URL=postgres://axumbooru:axumbooru@localhost/axumbooru\nRUST_LOG=debug" > .env
cp booruconfig_default.toml booruconfig.toml
cp docker-compose_templ.yaml docker-compose.yaml
# Starts PostgreSQL on 5432,
# szurubooru/client on 80 and phpPgAdmin on 8080
docker compose up -d
cargo install sea-orm-cli
# Don't forget to add ~/.cargo/bin to PATH
sea migrate
# Builds and runs Axumbooru
cargo run
```

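The steps above produce a `.env` with `DATABASE_URL`/`RUST_LOG` and a `booruconfig.toml` copied from the default template. A minimal sketch of how those two files could be read at startup with the `dotenvy`, `toml` and `serde` crates from Cargo.toml follows; the `BooruConfig` struct covers only a handful of the keys shown further down and is an assumption, not the project's actual config type.

```rust
// Sketch, not the project's real config loader: reads DATABASE_URL from .env
// and deserializes a few booruconfig.toml keys; unknown keys are ignored.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct BooruConfig {
    name: String,
    listen: String,
    secret: String,
    default_rank: String,
}

fn main() -> anyhow::Result<()> {
    dotenvy::dotenv().ok(); // picks up the .env written by the echo above
    let database_url = std::env::var("DATABASE_URL")?;

    let raw = std::fs::read_to_string("booruconfig.toml")?;
    let config: BooruConfig = toml::from_str(&raw)?;

    println!("{} listening on {} (db: {})", config.name, config.listen, database_url);
    Ok(())
}
```
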
164  booruconfig_default.toml  Normal file
@@ -0,0 +1,164 @@
name = "axumbooru"
# The following part of the config mirrors Szurubooru
# and is used as an example, so some of the
# options may not be used in the code
# and may not make it into a release.
# P.S. Some keys that I considered unnecessary
# or redundant have been omitted.


listen = "127.0.0.1:6667"

# full url to the homepage of this szurubooru site, with no trailing slash
domain = "" # example = http://example.com
# used to salt the users' password hashes and generate filenames for static content
secret = "change"

# Delete thumbnails and source files on post delete
# The original default is no, to mitigate the impact of admins going
# on unchecked post purges.
delete_source_files = false

contact_email = "admin@mail.example" # Meant for manual password reset procedures

enable_safety = true

tag_name_regex = '^\S+$'
tag_category_name_regex = '^[^\s%+#/]+$'

pool_name_regex = '^\S+$'
pool_category_name_regex = '^[^\s%+#/]+$'

# don't make these more restrictive unless you want to annoy people; if you do
# customize them, make sure to update the instructions in the registration form
# template as well.
password_regex = '^.{5,}$'
user_name_regex = '^[a-zA-Z0-9_-]{1,32}$'

# allow posts to be uploaded even if some image processing errors occur
allow_broken_uploads = false

# webhooks to call when events occur (such as post/tag/user/etc. changes)
# the listed urls will be called with a HTTP POST request with a payload
# containing a snapshot resource as JSON. See doc/API.md for details
# webhooks = [
#     "https://api.example.com/webhooks/",
# ]

default_rank = "regular"

[privileges]
"users:create:self" = "anonymous" # Registration permission
"users:create:any" = "administrator"
"users:list" = "regular"
"users:view" = "regular"
"users:edit:any:name" = "moderator"
"users:edit:any:pass" = "moderator"
"users:edit:any:email" = "moderator"
"users:edit:any:avatar" = "moderator"
"users:edit:any:rank" = "moderator"
"users:edit:self:name" = "regular"
"users:edit:self:pass" = "regular"
"users:edit:self:email" = "regular"
"users:edit:self:avatar" = "regular"
"users:edit:self:rank" = "moderator" # one can't promote themselves or anyone else to a higher rank than their own.
"users:delete:any" = "administrator"
"users:delete:self" = "regular"

"userTokens:list:any" = "administrator"
"userTokens:list:self" = "regular"
"userTokens:create:any" = "administrator"
"userTokens:create:self" = "regular"
"userTokens:edit:any" = "administrator"
"userTokens:edit:self" = "regular"
"userTokens:delete:any" = "administrator"
"userTokens:delete:self" = "regular"

"posts:create:anonymous" = "regular"
"posts:create:identified" = "regular"
"posts:list" = "anonymous"
"posts:reverseSearch" = "regular"
"posts:view" = "anonymous"
"posts:view:featured" = "anonymous"
"posts:edit:content" = "power"
"posts:edit:flags" = "regular"
"posts:edit:notes" = "regular"
"posts:edit:relations" = "regular"
"posts:edit:safety" = "power"
"posts:edit:source" = "regular"
"posts:edit:tags" = "regular"
"posts:edit:thumbnail" = "power"
"posts:feature" = "moderator"
"posts:delete" = "moderator"
"posts:score" = "regular"
"posts:merge" = "moderator"
"posts:favorite" = "regular"
"posts:bulk-edit:tags" = "power"
"posts:bulk-edit:safety" = "power"
"posts:bulk-edit:delete" = "power"

"tags:create" = "regular"
"tags:edit:names" = "power"
"tags:edit:category" = "power"
"tags:edit:description" = "power"
"tags:edit:implications" = "power"
"tags:edit:suggestions" = "power"
"tags:list" = "regular"
"tags:view" = "anonymous"
"tags:merge" = "moderator"
"tags:delete" = "moderator"

"tagCategories:create" = "moderator"
"tagCategories:edit:name" = "moderator"
"tagCategories:edit:color" = "moderator"
"tagCategories:edit:order" = "moderator"
"tagCategories:list" = "anonymous"
"tagCategories:view" = "anonymous"
"tagCategories:delete" = "moderator"
"tagCategories:setDefault" = "moderator"

"pools:create" = "regular"
"pools:edit:names" = "power"
"pools:edit:category" = "power"
"pools:edit:description" = "power"
"pools:edit:posts" = "power"
"pools:list" = "regular"
"pools:view" = "anonymous"
"pools:merge" = "moderator"
"pools:delete" = "moderator"

"poolCategories:create" = "moderator"
"poolCategories:edit:name" = "moderator"
"poolCategories:edit:color" = "moderator"
"poolCategories:list" = "anonymous"
"poolCategories:view" = "anonymous"
"poolCategories:delete" = "moderator"
"poolCategories:setDefault" = "moderator"

"comments:create" = "regular"
"comments:delete:any" = "moderator"
"comments:delete:own" = "regular"
"comments:edit:any" = "moderator"
"comments:edit:own" = "regular"
"comments:list" = "regular"
"comments:view" = "regular"
"comments:score" = "regular"

"snapshots:list" = "power"

"uploads:create" = "regular"
"uploads:useDownloader" = "power"

[thumbnails]
avatar_width = 300
avatar_height = 300
post_width = 300
post_height = 300

[smtp]
enabled = false
host = "smtp.mail.example"
port = 25
user = "booru"
pass = "booru"
from = "booru@mail.example"

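The `[privileges]` table maps each action name to the minimum user rank allowed to perform it. A sketch of how such a table could be deserialized and checked at request time follows; the `Rank` enum and its ordering (anonymous < regular < power < moderator < administrator) are assumptions for illustration, not the project's actual access-control code.

```rust
// Illustrative only: load the [privileges] table into a map and compare
// ranks by their declaration order. Unknown actions are denied.
use std::collections::HashMap;

use serde::Deserialize;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize)]
#[serde(rename_all = "lowercase")]
enum Rank {
    Anonymous,
    Regular,
    Power,
    Moderator,
    Administrator,
}

#[derive(Debug, Deserialize)]
struct Config {
    privileges: HashMap<String, Rank>,
}

/// True if `user_rank` meets or exceeds the rank required for `action`.
fn is_allowed(config: &Config, action: &str, user_rank: Rank) -> bool {
    config
        .privileges
        .get(action)
        .map(|required| user_rank >= *required)
        .unwrap_or(false)
}

fn main() -> anyhow::Result<()> {
    let raw = std::fs::read_to_string("booruconfig.toml")?;
    let config: Config = toml::from_str(&raw)?;

    // "posts:list" = "anonymous", "posts:delete" = "moderator" in the defaults above.
    assert!(is_allowed(&config, "posts:list", Rank::Anonymous));
    assert!(!is_allowed(&config, "posts:delete", Rank::Regular));
    Ok(())
}
```
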
5  client/Dockerfile  Normal file
@@ -0,0 +1,5 @@
FROM szurubooru/client:2.5
COPY nginx.conf.docker /etc/nginx/nginx.conf

CMD ["/docker-start.sh"]
VOLUME ["/data"]

101  client/nginx.conf.docker  Normal file
@@ -0,0 +1,101 @@
worker_processes 1;
user nginx;

error_log /dev/stderr warn;
pid /var/run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    log_format main '$remote_addr -> $request [$status] - '
                    'referer: $http_referer $http_x_forwarded_for';
    access_log /dev/stdout main;

    server_tokens off;
    keepalive_timeout 65;

    upstream backend {
        server __BACKEND__;
    }

    server {
        listen 80 default_server;

        location ~ ^/api$ {
            return 302 /api/;
        }

        location ~ ^/api/(.*)$ {
            tcp_nodelay on;

            add_header 'Access-Control-Allow-Origin' '*';
            if ($request_method = 'OPTIONS') {
                add_header 'Access-Control-Allow-Methods'
                    'GET, POST, PUT, DELETE, OPTIONS';
                add_header 'Access-Control-Allow-Headers'
                    'Authorization, Content-Type';
                return 200;
            }

            client_max_body_size 1073741824;

            gzip on;
            gzip_comp_level 3;
            gzip_min_length 20;
            gzip_proxied expired no-cache no-store private auth;
            gzip_types text/plain application/json;

            if ($request_uri ~* "/api/(.*)") {
                proxy_pass http://backend/$1;
            }

            error_page 500 502 503 504 @badproxy;
        }

        location /data/ {
            rewrite ^/data/(.*) /$1 break;
            root /data;

            sendfile on;
            tcp_nopush on;
            tcp_nodelay on;

            error_page 403 @unauthorized;
            error_page 404 @notfound;
        }

        location / {
            root /var/www;
            try_files $uri /index.htm;

            sendfile on;
            tcp_nopush on;
            tcp_nodelay on;

            gzip_static on;
            gzip_proxied expired no-cache no-store private auth;
        }

        location @unauthorized {
            return 403 "Unauthorized";
            default_type text/plain;
        }

        location @notfound {
            return 404 "Not Found";
            default_type text/plain;
        }

        location @badproxy {
            return 502 "Failed to connect to szurubooru REST API";
            default_type text/plain;
        }
    }
}

daemon off;

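Note that the `location ~ ^/api/(.*)$` block strips the `/api/` prefix before proxying (`proxy_pass http://backend/$1`), and `__BACKEND__` is presumably substituted by the client image's start script. A small illustrative sketch of what that implies on the Rust side: backend routes are defined without the `/api` prefix. The route name and bind address below are placeholders, not the real API.

```rust
// Sketch only: nginx rewrites /api/<path> to /<path> before proxying, so a
// request for GET /api/posts arrives at the backend as GET /posts.
use axum::{routing::get, Json, Router};

async fn list_posts() -> Json<serde_json::Value> {
    Json(serde_json::json!({ "posts": [] }))
}

#[tokio::main]
async fn main() {
    // Served here as /posts, reached by clients as /api/posts through nginx.
    let app = Router::new().route("/posts", get(list_posts));
    let listener = tokio::net::TcpListener::bind("127.0.0.1:6667").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}
```
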

182 config.inc.php Normal file
@@ -0,0 +1,182 @@
<?php

/**
 * Central phpPgAdmin configuration. As a user you may modify the
 * settings here for your particular configuration.
 *
 * $Id: config.inc.php-dist,v 1.55 2008/02/18 21:10:31 xzilla Exp $
 */

// An example server. Create as many of these as you wish,
// indexed from zero upwards.

// Display name for the server on the login screen
$conf['servers'][0]['desc'] = 'PostgreSQL';

// Hostname or IP address for server. Use '' for UNIX domain socket.
// Use 'localhost' for a TCP/IP connection on this computer.
$conf['servers'][0]['host'] = 'postgresql';

// Database port on server (5432 is the PostgreSQL default)
$conf['servers'][0]['port'] = 5432;

// Database SSL mode
// Possible options: disable, allow, prefer, require
// To require SSL on older servers use option: legacy
// To ignore the SSL mode, use option: unspecified
$conf['servers'][0]['sslmode'] = 'allow';

// Change the default database only if you cannot connect to template1.
// For a PostgreSQL 8.1+ server, you can set this to 'postgres'.
$conf['servers'][0]['defaultdb'] = 'template1';

// Specify the path to the database dump utilities for this server.
// You can set these to '' if no dumper is available.
$conf['servers'][0]['pg_dump_path'] = '/usr/bin/pg_dump';
$conf['servers'][0]['pg_dumpall_path'] = '/usr/bin/pg_dumpall';

// Example for a second server (PostgreSQL for Windows)
//$conf['servers'][1]['desc'] = 'Test Server';
//$conf['servers'][1]['host'] = '127.0.0.1';
//$conf['servers'][1]['port'] = 5432;
//$conf['servers'][1]['sslmode'] = 'allow';
//$conf['servers'][1]['defaultdb'] = 'template1';
//$conf['servers'][1]['pg_dump_path'] = 'C:\\Program Files\\PostgreSQL\\8.0\\bin\\pg_dump.exe';
//$conf['servers'][1]['pg_dumpall_path'] = 'C:\\Program Files\\PostgreSQL\\8.0\\bin\\pg_dumpall.exe';

/* Groups definition */
/* Groups allow administrators to logically group servers together under
 * group nodes in the left browser tree
 *
 * The group '0' description
 */
//$conf['srv_groups'][0]['desc'] = 'group one';

/* Add here servers indexes belonging to the group '0' separated by comma */
//$conf['srv_groups'][0]['servers'] = '0,1,2';

/* A server can belong to multiple groups. Here server 1 is referenced in both
 * 'group one' and 'group two' */
//$conf['srv_groups'][1]['desc'] = 'group two';
//$conf['srv_groups'][1]['servers'] = '3,1';

/* A group can be nested in one or more existing groups using the 'parents'
 * parameter. Here the group 'group three' contains only one server and will
 * appear as a subgroup in both 'group one' and 'group two':
 */
//$conf['srv_groups'][2]['desc'] = 'group three';
//$conf['srv_groups'][2]['servers'] = '4';
//$conf['srv_groups'][2]['parents'] = '0,1';

/* Warning: Only groups with no parents appear at the root of the tree. */

/* You can apply a specific theme depending on servers, users and databases.
 * The priority order is:
 * * the theme defined for a server
 * * the theme defined for a database applies over the server one
 * * the theme defined for a user applies over the database one
 */
/* Example for servers */
//$conf['servers'][0]['theme']['default'] = 'default';
/* Example for users */
//$conf['servers'][0]['theme']['user']['specific_user'] = 'default';
/* Example for databases */
//$conf['servers'][0]['theme']['db']['specific_db'] = 'default';

// Default language. E.g.: 'english', 'polish', etc. See lang/ directory
// for all possibilities. If you specify 'auto' (the default) it will use
// your browser preference.
$conf['default_lang'] = 'russian';

// If extra session security is true, then PHP's session cookies will have
// SameSite cookie flags set to prevent CSRF attacks. If you're using
// auto-start sessions, autostarted sessions will be destroyed and
// restarted with SameSite on. If this solution is not acceptable for
// your situation, you will need to either turn off auto-start sessions, or
// turn off secure sessions. Versions of PHP below 7.3 do not have access
// to this feature and will be vulnerable to CSRF attacks.
$conf['extra_session_security'] = true;

// AutoComplete uses AJAX interaction to list foreign key values
// on insert fields. It currently only works on single column
// foreign keys. You can choose one of the following values:
// 'default on' enables AutoComplete and turns it on by default.
// 'default off' enables AutoComplete but turns it off by default.
// 'disable' disables AutoComplete.
$conf['autocomplete'] = 'default on';

// If extra login security is true, then logins via phpPgAdmin with no
// password or certain usernames (pgsql, postgres, root, administrator)
// will be denied. Only set this false once you have read the FAQ and
// understand how to change PostgreSQL's pg_hba.conf to enable
// passworded local connections.
$conf['extra_login_security'] = true;

// Only show owned databases?
// Note: This will simply hide other databases in the list - it does
// not in any way prevent your users from seeing other databases by
// other means. (e.g. Run 'SELECT * FROM pg_database' in the SQL area.)
$conf['owned_only'] = false;

// Display comments on objects? Comments are a good way of documenting
// a database, but they do take up space in the interface.
$conf['show_comments'] = true;

// Display "advanced" objects? Setting this to true will show
// aggregates, types, operators, operator classes, conversions,
// languages and casts in phpPgAdmin. These objects are rarely
// administered and can clutter the interface.
$conf['show_advanced'] = false;

// Display "system" objects?
$conf['show_system'] = false;

// Minimum length users can set their password to.
$conf['min_password_length'] = 1;

// Width of the left frame in pixels (object browser)
$conf['left_width'] = 200;

// Which look & feel theme to use
$conf['theme'] = 'default';

// Show OIDs when browsing tables?
// Only supported in versions <= 11
$conf['show_oids'] = false;

// Max rows to show on a page when browsing record sets
$conf['max_rows'] = 30;

// Max chars of each field to display by default in browse mode
$conf['max_chars'] = 50;

// Send XHTML strict headers?
$conf['use_xhtml_strict'] = false;

// Base URL for PostgreSQL documentation.
// '%s', if present, will be replaced with the PostgreSQL version
// (e.g. 8.4)
$conf['help_base'] = 'http://www.postgresql.org/docs/%s/interactive/';

// Configuration for ajax scripts
// Time in seconds. If set to 0, refreshing data using ajax will be disabled (locks and activity pages)
$conf['ajax_refresh'] = 3;

/** Plugins management
 * Add plugin names to the following array to activate them
 * Example:
 *   $conf['plugins'] = array(
 *     'Example',
 *     'Slony'
 *   );
 */
$conf['plugins'] = array();

/*****************************************
 * Don't modify anything below this line *
 *****************************************/

$conf['version'] = 19;

?>

39 docker-compose_templ.yaml Normal file
@@ -0,0 +1,39 @@
services:
  phppgadmin:
    container_name: phppgadmin
    restart: unless-stopped
    image: ngosang/phppgadmin:7.14.6-mod-1
    volumes:
      - ./config.inc.php:/var/www/app/conf/config.inc.php:ro
    ports:
      - 8080:80

  frontend:
    # Based on szurubooru/client:2.5
    build: ./client
    extra_hosts:
      - "host.docker.internal:host-gateway"
    environment:
      # Timezone
      TZ: 'Europe/Moscow'
      # Backend "hostname:port"
      BACKEND_HOST: 'host.docker.internal:6667'
      BASE_URL:
    volumes:
      - ./data:/data:ro
    ports:
      - 80:80

  postgresql:
    image: postgres:11-alpine
    restart: unless-stopped
    environment:
      POSTGRES_USER: axumbooru
      POSTGRES_PASSWORD: axumbooru
    ports:
      - 5432:5432
    volumes:
      - db-data:/var/lib/postgresql/data

volumes:
  db-data:
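
Since `docker-compose.yaml` itself is git-ignored, the template above is meant to be copied and adjusted before use. A rough usage sketch (the backend port 6667 comes from `BACKEND_HOST` in the template; whether the backend is started with plain `cargo run` is an assumption):

```sh
cp docker-compose_templ.yaml docker-compose.yaml
docker compose up -d postgresql phppgadmin frontend
# the Rust backend runs on the host and must listen where BACKEND_HOST points (port 6667)
cargo run
```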

22 migration/Cargo.toml Normal file
@@ -0,0 +1,22 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
name = "migration"
path = "src/lib.rs"

[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }

[dependencies.sea-orm-migration]
version = "0.12.14"
features = [
    # Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
    # View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
    # e.g.
    "runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
    "sqlx-postgres",        # `DATABASE_DRIVER` feature
]

41 migration/README.md Normal file
@@ -0,0 +1,41 @@
# Running Migrator CLI

- Generate a new migration file
    ```sh
    cargo run -- generate MIGRATION_NAME
    ```
- Apply all pending migrations
    ```sh
    cargo run
    ```
    ```sh
    cargo run -- up
    ```
- Apply first 10 pending migrations
    ```sh
    cargo run -- up -n 10
    ```
- Rollback last applied migrations
    ```sh
    cargo run -- down
    ```
- Rollback last 10 applied migrations
    ```sh
    cargo run -- down -n 10
    ```
- Drop all tables from the database, then reapply all migrations
    ```sh
    cargo run -- fresh
    ```
- Rollback all applied migrations, then reapply all migrations
    ```sh
    cargo run -- refresh
    ```
- Rollback all applied migrations
    ```sh
    cargo run -- reset
    ```
- Check the status of all migrations
    ```sh
    cargo run -- status
    ```
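
The migrator CLI reads the connection string from the `DATABASE_URL` environment variable (standard sea-orm-migration behaviour). With the bundled compose template the invocation would look roughly like the line below; the database name `axumbooru` is an assumption, since only the user and password appear in the compose file:

```sh
DATABASE_URL=postgres://axumbooru:axumbooru@localhost:5432/axumbooru cargo run -- status
```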

20 migration/src/lib.rs Normal file
@@ -0,0 +1,20 @@
pub use sea_orm_migration::prelude::*;

mod m20240225_224934_create_user;
mod m20240227_020126_create_post;
mod m20240309_230819_create_user_token;
mod m20240309_230808_create_snapshot;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20240225_224934_create_user::Migration),
            Box::new(m20240227_020126_create_post::Migration),
            Box::new(m20240309_230808_create_snapshot::Migration),
            Box::new(m20240309_230819_create_user_token::Migration),
        ]
    }
}
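
For reference, the same `Migrator` can also be driven from the backend itself instead of through the CLI. A minimal sketch, assuming a `DATABASE_URL` environment variable and a dependency on the `migration` crate (not shown in this commit):

```rust
use migration::{Migrator, MigratorTrait};
use sea_orm::Database;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Hypothetical startup snippet: connect and apply all pending migrations.
    let url = std::env::var("DATABASE_URL")?;
    let conn = Database::connect(&url).await?;
    Migrator::up(&conn, None).await?; // `None` = apply every pending migration
    Ok(())
}
```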

68 migration/src/m20240225_224934_create_user.rs Normal file
@@ -0,0 +1,68 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(User::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(User::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(
                        ColumnDef::new(User::Name)
                            .string_len(50)
                            .not_null()
                            .unique_key(),
                    )
                    .col(ColumnDef::new(User::PasswordHash).string_len(128).not_null())
                    .col(ColumnDef::new(User::PasswordSalt).string_len(32))
                    .col(ColumnDef::new(User::Email).string_len(64))
                    .col(ColumnDef::new(User::Rank).string_len(32).not_null())
                    .col(ColumnDef::new(User::CreationTime).timestamp().not_null())
                    .col(ColumnDef::new(User::LastLoginTime).timestamp())
                    .col(ColumnDef::new(User::AvatarStyle).string_len(32).not_null())
                    .col(ColumnDef::new(User::Version).integer().not_null())
                    .col(ColumnDef::new(User::PasswordRevision).small_integer().not_null())
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(User::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
pub(super) enum User {
    Table,
    Id,
    Name,
    #[sea_orm(iden = "password_hash")]
    PasswordHash,
    #[sea_orm(iden = "password_salt")]
    PasswordSalt,
    Email,
    Rank,
    #[sea_orm(iden = "creation_time")]
    CreationTime,
    #[sea_orm(iden = "last_login_time")]
    LastLoginTime,
    #[sea_orm(iden = "avatar_style")]
    AvatarStyle,
    Version,
    #[sea_orm(iden = "password_revision")]
    PasswordRevision,
}

82 migration/src/m20240227_020126_create_post.rs Normal file
@@ -0,0 +1,82 @@
use sea_orm_migration::prelude::*;

use crate::m20240225_224934_create_user::User;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Post::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Post::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Post::UserId).integer())
                    .col(ColumnDef::new(Post::CreationTime).timestamp().not_null())
                    .col(ColumnDef::new(Post::LastEditTime).timestamp())
                    .col(ColumnDef::new(Post::Safety).string_len(32).not_null())
                    .col(ColumnDef::new(Post::Type).string_len(32).not_null())
                    .col(ColumnDef::new(Post::Checksum).string_len(64).not_null())
                    .col(ColumnDef::new(Post::Source).string_len(2048))
                    .col(ColumnDef::new(Post::FileSize).big_integer())
                    .col(ColumnDef::new(Post::ImageWidth).integer())
                    .col(ColumnDef::new(Post::ImageHeight).integer())
                    .col(ColumnDef::new(Post::MimeType).string_len(32).not_null())
                    .col(ColumnDef::new(Post::Version).integer().not_null())
                    .col(ColumnDef::new(Post::Flags).string_len(32))
                    .col(ColumnDef::new(Post::ChecksumMD5).string_len(32))
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_posts_userid")
                            .from(Post::Table, Post::UserId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::SetNull),
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Post::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum Post {
    Table,
    Id,
    #[sea_orm(iden = "user_id")]
    UserId,
    #[sea_orm(iden = "creation_time")]
    CreationTime,
    #[sea_orm(iden = "last_edit_time")]
    LastEditTime,
    Safety,
    Type,
    Checksum,
    Source,
    #[sea_orm(iden = "file_size")]
    FileSize,
    #[sea_orm(iden = "image_width")]
    ImageWidth,
    #[sea_orm(iden = "image_height")]
    ImageHeight,
    #[sea_orm(iden = "mime-type")]
    MimeType,
    Version,
    Flags,
    #[sea_orm(iden = "checksum_md5")]
    ChecksumMD5,
}

65 migration/src/m20240309_230808_create_snapshot.rs Normal file
@@ -0,0 +1,65 @@
use sea_orm_migration::prelude::*;

use crate::m20240225_224934_create_user::User;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Snapshot::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Snapshot::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Snapshot::CreationTime).timestamp().not_null())
                    .col(ColumnDef::new(Snapshot::ResourceType).string_len(32).not_null())
                    .col(ColumnDef::new(Snapshot::Operation).string_len(16).not_null())
                    .col(ColumnDef::new(Snapshot::UserId).integer())
                    .col(ColumnDef::new(Snapshot::Data).binary())
                    .col(ColumnDef::new(Snapshot::ResourceName).string_len(128).not_null())
                    .col(ColumnDef::new(Snapshot::ResourcePkey).integer().not_null())
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_snapshot_userid")
                            .from(Snapshot::Table, Snapshot::UserId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::SetNull),
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Snapshot::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum Snapshot {
    Table,
    Id,
    #[sea_orm(iden = "creation_time")]
    CreationTime,
    #[sea_orm(iden = "resource_type")]
    ResourceType,
    Operation,
    #[sea_orm(iden = "user_id")]
    UserId,
    Data,
    #[sea_orm(iden = "resource_name")]
    ResourceName,
    #[sea_orm(iden = "resource_pkey")]
    ResourcePkey,
}

71 migration/src/m20240309_230819_create_user_token.rs Normal file
@@ -0,0 +1,71 @@
use sea_orm_migration::prelude::*;

use crate::m20240225_224934_create_user::User;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(UserToken::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(UserToken::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(UserToken::UserId).integer().not_null())
                    .col(ColumnDef::new(UserToken::Token).string_len(36).not_null())
                    .col(ColumnDef::new(UserToken::Note).string_len(128))
                    .col(ColumnDef::new(UserToken::Enabled).boolean().not_null())
                    .col(ColumnDef::new(UserToken::ExpirationTime).timestamp())
                    .col(ColumnDef::new(UserToken::CreationTime).timestamp().not_null())
                    .col(ColumnDef::new(UserToken::LastEditTime).timestamp())
                    .col(ColumnDef::new(UserToken::LastUsageTime).timestamp())
                    .col(ColumnDef::new(UserToken::Version).integer().not_null())
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_user_token_userid")
                            .from(UserToken::Table, UserToken::UserId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(UserToken::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum UserToken {
    Table,
    Id,
    #[sea_orm(iden = "user_id")]
    UserId,
    Token,
    Note,
    Enabled,
    #[sea_orm(iden = "expiration_time")]
    ExpirationTime,
    #[sea_orm(iden = "creation_time")]
    CreationTime,
    #[sea_orm(iden = "last_edit_time")]
    LastEditTime,
    #[sea_orm(iden = "last_usage_time")]
    LastUsageTime,
    Version,
}

6 migration/src/main.rs Normal file
@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;

#[async_std::main]
async fn main() {
    cli::run_cli(migration::Migrator).await;
}

63 src/api/data.rs Normal file
@@ -0,0 +1,63 @@
use axum::{extract::{Multipart, State}, routing::get_service, Json, Router};
use serde::Serialize;
use tower_http::services::ServeDir;
use log::{debug, info};
use std::{fs, sync::Arc};

use crate::{data::DATA, error::{ApiError, ApiResult}, AppState};

pub fn data_static() -> Router {
    Router::new().nest_service("/", get_service(ServeDir::new(DATA)))
}

pub fn get_folder_size(path: &str) -> Result<u64, std::io::Error> {
    // Read the directory entries
    let entries = fs::read_dir(path)?;
    // Accumulator for the total folder size
    let mut total_size: u64 = 0;
    // Iterate over the entries
    for entry in entries {
        // Fetch the entry's metadata
        let metadata = entry?.metadata()?;
        // If the entry is a file, add its size to the total
        if metadata.is_file() {
            total_size += metadata.len();
        }
    }
    debug!("Data dir size: {total_size}");
    // Return the total folder size
    Ok(total_size)
}

#[derive(Serialize)]
pub struct UploadResponse {
    pub token: String,
}

fn to_upload_error(_: anyhow::Error) -> ApiError {
    ApiError::Uploads
}

pub async fn upload(State(state): State<Arc<AppState>>, mut multipart: Multipart) -> ApiResult<Json<UploadResponse>> {
    let mut token: Option<String> = None;
    while let Some(field) = multipart.next_field().await.unwrap() {
        let name = field.name().unwrap().to_string();
        // debug!("Multipart: {:?} {:?} {:?} {:?}", field.content_type(), field.file_name(), field.name(), field.headers());
        if name == "content".to_string() {
            let filename = field.file_name().unwrap().to_owned();
            let content_type = field.content_type().unwrap().to_owned();
            let (_, extension) = filename.split_once('.').expect("Damaged file");
            let data = field.bytes().await.unwrap();
            token = Some(state.uploads.lock().expect("Uploads mutex was poisoned!").add(extension, &content_type, data).map_err(to_upload_error)?);
        }
    }
    match token {
        Some(token) => {
            info!("Responding token: {token}");
            return Ok(Json(UploadResponse { token }))
        },
        None => {
            panic!("DOESN'T HAVE CONTENT!")
        },
    }
}
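
These are plain axum handlers; the routes they are mounted on live elsewhere in the crate. As an illustration only (the `/uploads` path and the exact state wiring are assumptions, not part of this file), the upload handler and the static data service could be combined roughly like this:

```rust
use std::sync::Arc;
use axum::{routing::post, Router};

// Hypothetical wiring sketch; `AppState` and the handlers come from this crate.
fn api_router(state: Arc<crate::AppState>) -> Router {
    Router::new()
        // Multipart upload endpoint backed by api::data::upload
        .route("/uploads", post(crate::api::data::upload))
        .with_state(state)
        // Serve files from the data directory under /data
        .nest_service("/data", crate::api::data::data_static())
}
```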

226 src/api/info.rs Normal file
@@ -0,0 +1,226 @@
use crate::{
    config::Privileges,
    AppState, Config, UserRank,
};
use axum::{extract::State, Json};
use chrono::prelude::*;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use log::debug;

#[derive(Serialize, Deserialize)]
pub struct InfoAnswer {
    #[serde(rename = "postCount")]
    post_count: u64,
    #[serde(rename = "diskUsage")]
    disk_usage: u64,
    #[serde(rename = "serverTime")]
    server_time: chrono::NaiveDateTime,
    #[serde(rename = "config")]
    config: FrontendConfig,
    #[serde(rename = "featuredPost")]
    featured_post: Option<()>, // Temporarily removed
    #[serde(rename = "featuringUser")]
    featuring_user: Option<()>, // Temporarily removed
    #[serde(rename = "featuringTime")]
    featuring_time: Option<NaiveDateTime>,
}

#[derive(Serialize, Deserialize)]
pub struct FrontendConfig {
    #[serde(rename = "name")]
    name: String,
    #[serde(rename = "userNameRegex")]
    user_name_regex: String,
    #[serde(rename = "passwordRegex")]
    password_regex: String,
    #[serde(rename = "tagNameRegex")]
    tag_name_regex: String,
    #[serde(rename = "tagCategoryNameRegex")]
    tag_category_name_regex: String,
    #[serde(rename = "defaultUserRank")]
    default_user_rank: UserRank,
    #[serde(rename = "enableSafety")]
    enable_safety: bool,
    #[serde(rename = "contactEmail")]
    contact_email: String,
    #[serde(rename = "canSendMails")]
    can_send_mails: bool,
    #[serde(rename = "privileges")]
    privileges: Privileges,
}

impl FrontendConfig {
    pub async fn from_config(config: Config) -> Self {
        Self {
            name: config.name,
            user_name_regex: config.user_name_regex,
            password_regex: config.password_regex,
            tag_name_regex: config.tag_name_regex,
            tag_category_name_regex: config.tag_category_name_regex,
            default_user_rank: config.default_rank,
            enable_safety: config.enable_safety,
            contact_email: config.contact_email,
            can_send_mails: config.smtp.enabled,
            privileges: config.privileges,
        }
    }
}

// #[derive(Serialize, Deserialize)]
// pub struct FeaturedPost {
//     #[serde(rename = "id")]
//     id: i64,
//     #[serde(rename = "version")]
//     version: i64,
//     #[serde(rename = "creationTime")]
//     creation_time: chrono::NaiveDateTime,
//     #[serde(rename = "lastEditTime")]
//     last_edit_time: Option<chrono::NaiveDateTime>,
//     #[serde(rename = "safety")]
//     safety: String,
//     #[serde(rename = "source")]
//     source: Option<serde_json::Value>,
//     #[serde(rename = "type")]
//     featured_post_type: String,
//     #[serde(rename = "mimeType")]
//     mime_type: String,
//     #[serde(rename = "checksum")]
//     checksum: String,
//     #[serde(rename = "checksumMD5")]
//     checksum_md5: String,
//     #[serde(rename = "fileSize")]
//     file_size: i64,
//     #[serde(rename = "canvasWidth")]
//     canvas_width: i64,
//     #[serde(rename = "canvasHeight")]
//     canvas_height: i64,
//     #[serde(rename = "contentUrl")]
//     content_url: String,
//     #[serde(rename = "thumbnailUrl")]
//     thumbnail_url: String,
//     #[serde(rename = "flags")]
//     flags: Vec<String>,
//     #[serde(rename = "tags")]
//     tags: Vec<Tag>,
//     #[serde(rename = "relations")]
//     relations: Vec<Option<serde_json::Value>>,
//     #[serde(rename = "user")]
//     user: User,
//     #[serde(rename = "score")]
//     score: i64,
//     #[serde(rename = "ownScore")]
//     own_score: i64,
//     #[serde(rename = "ownFavorite")]
//     own_favorite: bool,
//     #[serde(rename = "tagCount")]
//     tag_count: i64,
//     #[serde(rename = "favoriteCount")]
//     favorite_count: i64,
//     #[serde(rename = "commentCount")]
//     comment_count: i64,
//     #[serde(rename = "noteCount")]
//     note_count: i64,
//     #[serde(rename = "relationCount")]
//     relation_count: i64,
//     #[serde(rename = "featureCount")]
//     feature_count: i64,
//     #[serde(rename = "lastFeatureTime")]
//     last_feature_time: Option<chrono::NaiveDateTime>,
//     #[serde(rename = "favoritedBy")]
//     favorited_by: Vec<Option<serde_json::Value>>,
//     #[serde(rename = "hasCustomThumbnail")]
//     has_custom_thumbnail: bool,
//     #[serde(rename = "notes")]
//     notes: Vec<Option<serde_json::Value>>,
//     #[serde(rename = "comments")]
//     comments: Vec<Comment>,
//     #[serde(rename = "pools")]
//     pools: Vec<Option<serde_json::Value>>,
// }

// #[derive(Serialize, Deserialize)]
// pub struct Comment {
//     #[serde(rename = "id")]
//     id: i64,
//     #[serde(rename = "user")]
//     user: User,
//     #[serde(rename = "postId")]
//     post_id: i64,
//     #[serde(rename = "version")]
//     version: i64,
//     #[serde(rename = "text")]
//     text: String,
//     #[serde(rename = "creationTime")]
//     creation_time: chrono::NaiveDateTime,
//     #[serde(rename = "lastEditTime")]
//     last_edit_time: Option<chrono::NaiveDateTime>,
//     #[serde(rename = "score")]
//     score: i64,
//     #[serde(rename = "ownScore")]
//     own_score: i64,
// }

// #[derive(Serialize, Deserialize)]
// pub struct User {
//     #[serde(rename = "name")]
//     name: String,
//     #[serde(rename = "avatarUrl")]
//     avatar_url: String,
// }

// #[derive(Serialize, Deserialize)]
// pub struct Tag {
//     #[serde(rename = "names")]
//     names: Vec<String>,
//     #[serde(rename = "category")]
//     category: String,
//     #[serde(rename = "usages")]
//     usages: i64,
// }

// #[derive(Serialize, Deserialize)]
// pub struct FeaturingUser {
//     #[serde(rename = "name")]
//     name: String,
//     #[serde(rename = "creationTime")]
//     creation_time: chrono::NaiveDateTime,
//     #[serde(rename = "lastLoginTime")]
//     last_login_time: Option<chrono::NaiveDateTime>,
//     #[serde(rename = "version")]
//     version: i64,
//     #[serde(rename = "rank")]
//     rank: UserRank,
//     #[serde(rename = "avatarStyle")]
//     avatar_style: String,
//     #[serde(rename = "avatarUrl")]
//     avatar_url: String,
//     #[serde(rename = "commentCount")]
//     comment_count: i64,
//     #[serde(rename = "uploadedPostCount")]
//     uploaded_post_count: i64,
//     #[serde(rename = "favoritePostCount")]
//     favorite_post_count: i64,
//     #[serde(rename = "likedPostCount")]
//     liked_post_count: bool,
//     #[serde(rename = "dislikedPostCount")]
//     disliked_post_count: bool,
//     #[serde(rename = "email")]
//     email: bool,
// }

pub async fn server_info(State(state): State<Arc<AppState>>) -> Json<InfoAnswer> {
    debug!("called");

    let info = InfoAnswer {
        post_count: state.db.get_posts_count().await.unwrap(),
        disk_usage: crate::api::data::get_folder_size("./data").unwrap(),
        server_time: Local::now().naive_local(),
        config: FrontendConfig::from_config(state.config.clone()).await,
        featured_post: None,
        featuring_user: None,
        featuring_time: None,
    };

    Json(info)
}
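
For a quick smoke test, the szurubooru web client fetches this handler as `GET /info`. The route path and the port below are assumptions taken from the szurubooru API and the compose template, not from this file:

```sh
curl -s http://localhost:6667/info
```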

6 src/api/mod.rs Normal file
@@ -0,0 +1,6 @@
pub mod data;
pub mod info;
pub mod post;
pub mod test;
pub mod user;
pub mod usertoken;

4 src/api/post/mod.rs Normal file
@@ -0,0 +1,4 @@
pub mod post;
pub use post::*;

pub mod model;

143 src/api/post/model.rs Normal file
@@ -0,0 +1,143 @@
use serde::{Deserialize, Serialize};
use chrono::NaiveDateTime;

#[derive(Serialize, Deserialize)]
pub struct ListOfPostsAnswer {
    pub query: String,
    pub offset: u64,
    pub limit: u64,
    pub total: u64,
    pub results: Vec<MiniPost>,
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MiniPost {
    pub id: i32,
    pub thumbnail_url: String,
    pub r#type: String,
    pub safety: String,
    pub score: i32,
    pub favorite_count: i32,
    pub comment_count: i32,
    pub tags: Vec<()>, // Vec<Tag>
    pub version: i32,
}

impl MiniPost {
    pub fn from_model(
        model: &crate::db::schemas::post::Model,
        thumbnail_url: String,
        score: i32,
        favorite_count: i32,
        comment_count: i32,
        tags: Vec<()> // TODO: Vec<Tag>
    ) -> Self {
        Self {
            id: model.id,
            thumbnail_url,
            r#type: model.r#type.clone(),
            safety: model.safety.clone(),
            score,
            favorite_count,
            comment_count,
            tags,
            version: model.version,
        }
    }
}

#[derive(Serialize, Deserialize)]
pub struct PostAnswer {
    pub id: i32,
    pub version: i32,
    #[serde(rename = "creationTime")]
    pub creation_time: NaiveDateTime,
    #[serde(rename = "lastEditTime")]
    pub last_edit_time: Option<NaiveDateTime>,
    pub safety: String,
    pub source: Option<String>,
    #[serde(rename = "type")]
    pub type_field: String,
    #[serde(rename = "mimeType")]
    pub mime_type: String,
    pub checksum: String,
    #[serde(rename = "checksumMD5")]
    pub checksum_md5: Option<String>,
    #[serde(rename = "fileSize")]
    pub file_size: Option<i64>,
    #[serde(rename = "canvasWidth")]
    pub canvas_width: Option<i32>,
    #[serde(rename = "canvasHeight")]
    pub canvas_height: Option<i32>,
    #[serde(rename = "contentUrl")]
    pub content_url: String,
    #[serde(rename = "thumbnailUrl")]
    pub thumbnail_url: String,
    pub flags: Vec<String>,
    pub tags: Vec<()>, // Vec<Tag>
    pub relations: Vec<()>,
    pub user: Option<User>,
    pub score: i64,
    #[serde(rename = "ownScore")]
    pub own_score: i64,
    #[serde(rename = "ownFavorite")]
    pub own_favorite: bool,
    #[serde(rename = "tagCount")]
    pub tag_count: i64,
    #[serde(rename = "favoriteCount")]
    pub favorite_count: i64,
    #[serde(rename = "commentCount")]
    pub comment_count: i64,
    #[serde(rename = "noteCount")]
    pub note_count: i64,
    #[serde(rename = "relationCount")]
    pub relation_count: i64,
    #[serde(rename = "featureCount")]
    pub feature_count: i64,
    #[serde(rename = "lastFeatureTime")]
    pub last_feature_time: Option<()>,
    #[serde(rename = "favoritedBy")]
    pub favorited_by: Vec<()>,
    #[serde(rename = "hasCustomThumbnail")]
    pub has_custom_thumbnail: bool,
    pub notes: Vec<()>,
    pub comments: Vec<()>,
    pub pools: Vec<()>,
}

// #[derive(Serialize, Deserialize)]
// pub struct Tag {
//     pub names: Vec<String>,
//     pub category: String,
//     pub usages: i64,
// }

#[derive(Serialize, Deserialize)]
pub struct User {
    pub name: String,
    #[serde(rename = "avatarUrl")]
    pub avatar_url: String,
}

#[derive(Debug, Deserialize)]
pub struct PostsParams {
    pub query: String,
    pub offset: Option<u64>,
    pub limit: u64,
    pub fields: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ReverseSearchQuery {
    pub content_token: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ReverseSearchAnswer {
    pub exact_post: Option<String>,
    pub similar_posts: Vec<()>,
}

115 src/api/post/post.rs Normal file
@@ -0,0 +1,115 @@
use std::sync::Arc;
use axum::{
    extract::{Path, Query, State},
    Json,
};
use log::debug;

use crate::{
    error::{ApiError, ApiResult}, AppState, func::post::*
};
use super::model::*;

pub async fn list_of_posts(
    Query(params): Query<PostsParams>,
    State(state): State<Arc<AppState>>,
) -> ApiResult<Json<ListOfPostsAnswer>> {
    debug!("Post listing params: {params:?}");
    // let fields_mas = get_fields_from_string(params.fields.clone());
    // debug!("{:?}", &fields_mas);
    // oki

    let total = state.db.get_posts_count().await?;
    let offset = {
        match params.offset {
            None => 0,
            Some(i) => i,
        }
    };

    let (results_raw, _) = state.db.get_posts_in_page(offset, params.limit).await?;
    // PostQuery::find_posts_in_page_with_filter(&state.db, offset, fields_mas, params.limit).await.unwrap();
    debug!("{results_raw:?}");
    let mut results: Vec<MiniPost> = Vec::new();
    for model in results_raw.iter() {
        results.push(MiniPost::from_model(model, "data/avatarka.jpg".to_string(), 0, 0, 0, Vec::new()))
    } // TODO: placeholders :(

    let posts = ListOfPostsAnswer {
        query: params.fields,
        offset,
        limit: params.limit,
        total,
        results,
    };

    Ok(Json(posts))
    // end
}

pub async fn get_post_by_id(
    Path(id): Path<u64>,
    State(state): State<Arc<AppState>>,
) -> ApiResult<Json<PostAnswer>> {
    let raw_post = state.db.get_post_by_id(id).await?;

    let mut flags: Vec<String> = Vec::new();
    if raw_post.flags.is_some() {
        for part in raw_post.flags.unwrap().split(",") {
            flags.push(part.to_string());
        }
    }

    let post = PostAnswer {
        id: raw_post.id,
        version: raw_post.version,
        creation_time: raw_post.creation_time,
        last_edit_time: raw_post.last_edit_time,
        safety: raw_post.safety,
        source: raw_post.source,
        type_field: raw_post.r#type,
        mime_type: raw_post.mime_type.clone(),
        checksum: raw_post.checksum,
        checksum_md5: raw_post.checksum_md5,
        file_size: raw_post.file_size,
        canvas_width: raw_post.image_width,
        canvas_height: raw_post.image_height,
        content_url: get_post_content_path(raw_post.id, get_post_security_hash(id, &state.config.secret), &raw_post.mime_type),
        thumbnail_url: get_post_thumbnail_path(raw_post.id, get_post_security_hash(id, &state.config.secret)),
        flags, // TODO: everything below is just placeholders
        tags: Vec::new(),
        relations: Vec::new(),
        user: None,
        score: 0,
        own_score: 0,
        own_favorite: false,
        tag_count: 0,
        favorite_count: 0,
        comment_count: 0,
        note_count: 0,
        relation_count: 0,
        feature_count: 0,
        last_feature_time: None,
        favorited_by: Vec::new(),
        has_custom_thumbnail: false,
        notes: Vec::new(),
        comments: Vec::new(),
        pools: Vec::new(),
    };

    Ok(Json(post))
}

pub async fn reverse_post_search(
    State(state): State<Arc<AppState>>,
    Json(content_path): Json<ReverseSearchQuery>
) -> ApiResult<Json<ReverseSearchAnswer>> {
    // TODO: just a stub for now
    if !state.uploads.lock().expect("Uploads mutex was poisoned!").is_existing(&content_path.content_token) {
        return Err(ApiError::Uploads);
    };
    Ok(Json(ReverseSearchAnswer {
        exact_post: None,
        similar_posts: Vec::new(),
    }))
}
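
Assuming the szurubooru-compatible route layout (`GET /posts/` with `query`, `offset`, `limit` and `fields` parameters; the actual paths are defined elsewhere, not in this file), listing the first page could look roughly like this:

```sh
curl -s 'http://localhost:6667/posts/?query=&offset=0&limit=40&fields=id,thumbnailUrl,type,safety,score,version'
```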

30 src/api/test.rs Normal file
@@ -0,0 +1,30 @@
use std::sync::Arc;

use crate::{error::*, AppState, RequireAuth};
use axum::{extract::State, Json};
use serde_json::{json, Value};
use log::debug;

pub async fn test(
    auth: RequireAuth,
    State(state): State<Arc<AppState>>,
) -> ApiResult<Json<Value>> {
    // TODO: maybe this will become a diagnostics endpoint in the future

    let state = state.uploads.lock().expect("Uploads poisoned").vec();
    let body = Json(json!({"auth": format!("{auth:?}"), "uploads": format!("{:?}", state)}));

    debug!("{body:?}");
    return Ok(body);

}

pub async fn newtest(
) -> ApiResult<Json<Value>> {
    return Err(ApiError::Test(TestError::ItsJustForTest));
}

pub async fn newtest2(
) -> ApiResult<Json<Value>> {
    return Err(ApiError::Test(TestError::SecondEntry));
}

134 src/api/user.rs Normal file
@@ -0,0 +1,134 @@
use std::str::FromStr;
use std::sync::Arc;

use axum::extract::{Path, Query, State, Json};
use chrono::{Local, NaiveDateTime};
use serde::{Serialize, Deserialize};
use log::debug;
use sea_orm::Set;

use crate::{
    db::schemas::user, error::ApiResult, AppState, AvatarStyle, UserRank
};

#[derive(Serialize, Deserialize)]
pub struct UserHttpAnswer {
    pub name: String,
    #[serde(rename = "creationTime")]
    pub creation_time: NaiveDateTime,
    #[serde(rename = "lastLoginTime")]
    pub last_login_time: Option<NaiveDateTime>,
    pub version: i32,
    pub rank: UserRank,
    #[serde(rename = "avatarStyle")]
    pub avatar_style: AvatarStyle,
    #[serde(rename = "avatarUrl")]
    pub avatar_url: String,
    #[serde(rename = "commentCount")]
    pub comment_count: i32,
    #[serde(rename = "uploadedPostCount")]
    pub uploaded_post_count: i32,
    #[serde(rename = "favoritePostCount")]
    pub favorite_post_count: i32,
    #[serde(rename = "likedPostCount")]
    pub liked_post_count: i32,
    #[serde(rename = "dislikedPostCount")]
    pub disliked_post_count: i32,
    pub email: Option<String>,
}

// TODO! Rework all structs to use 'rename_all = "camelCase"'
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct MicroUser {
    pub name: String,
    pub avatar_url: String,
}

#[derive(Debug, Deserialize)]
pub struct UserHttpQuery {
    #[serde(rename = "bump-login")]
    pub bump_login: bool,
}

impl Default for UserHttpQuery {
    fn default() -> Self {
        Self { bump_login: false }
    }
}

pub async fn get_user(
    Path(user): Path<String>,
    params: Option<Query<UserHttpQuery>>,
    State(state): State<Arc<AppState>>,
) -> ApiResult<Json<UserHttpAnswer>> {

    let mut raw_user = state.db.get_user_by_name(&user).await?;

    // Update last login time if needed. P.S. Extra work: this true/false juggling is not really necessary!
    let Query(params) = params.unwrap_or_default();
    if params.bump_login == true {
        raw_user = state.db.update_last_login_time(&raw_user.name).await?
    }

    Ok(Json(UserHttpAnswer {
        name: user,
        creation_time: raw_user.creation_time,
        last_login_time: raw_user.last_login_time,
        version: raw_user.version,
        rank: UserRank::from_str(&raw_user.rank).unwrap(),
        avatar_style: AvatarStyle::from_str(&raw_user.avatar_style).unwrap(),
        avatar_url: "data/avatarka.jpg".to_string(), // TODO! Hardcoded!
        comment_count: 0, // TODO!
        uploaded_post_count: 0, // TODO!
        favorite_post_count: 0, // TODO!
        liked_post_count: 0, // TODO!
        disliked_post_count: 0, // TODO!
        email: raw_user.email,
    }))
}

#[derive(Deserialize, Debug)]
pub struct CreateUserHttpQuery {
    pub name: String,
    pub password: String,
    pub email: Option<String>,
    pub rank: Option<UserRank>,
    #[serde(rename = "avatarStyle")]
    pub avatar_style: Option<AvatarStyle>,
}

pub async fn create_user(
    State(state): State<Arc<AppState>>,
    Json(params): Json<CreateUserHttpQuery>, // NOTE: the Json extractor consumes the request body, so it MUST be the last argument (see the last line of the Json extractor docs)
) -> ApiResult<Json<UserHttpAnswer>> {
    debug!("Trying to create new user with credentials: {params:?}");
    let form_data = user::ActiveModel {
        name: Set(params.name.clone()), // TODO!
        password_hash: Set("aaa".to_string()), // Rework to use Default
        password_salt: Set(Some("aaa".to_string())), // Use UserToken as a reference
        email: Set(params.email.clone()),
        rank: Set(UserRank::Administrator.to_string()),
        creation_time: Set(Local::now().naive_local().to_owned()),
        avatar_style: Set(AvatarStyle::Gravatar.to_string()),
        ..Default::default()
    };
    let created_user = state.db.create_user(form_data).await?;

    let raw_user = state.db.get_user_by_id(created_user.id.unwrap() as u64).await?;
    Ok(Json(UserHttpAnswer {
        name: raw_user.name,
        creation_time: raw_user.creation_time,
        last_login_time: raw_user.last_login_time,
        version: raw_user.version,
        rank: UserRank::from_str(&raw_user.rank).unwrap(),
        avatar_style: AvatarStyle::from_str(&raw_user.avatar_style).unwrap(),
        avatar_url: "data/avatarka.jpg".to_string(), // TODO! Hardcoded!
        comment_count: 0, // TODO!
        uploaded_post_count: 0, // TODO!
        favorite_post_count: 0, // TODO!
        liked_post_count: 0, // TODO!
        disliked_post_count: 0, // TODO!
        email: raw_user.email,
    }))
}

131 src/api/usertoken.rs Normal file
@@ -0,0 +1,131 @@
use std::sync::Arc;

use axum::{extract::{Path, State}, Json};
use chrono::{DateTime, Local, Months, NaiveDateTime};
use sea_orm::Set;
use serde::{Deserialize, Serialize};
use log::debug;
use uuid::Uuid;

use crate::{
    db::schemas::user_token, error::{ApiError, ApiResult}, AppState
};

use super::user::MicroUser;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UserTokenHttpResponse {
    pub user: MicroUser,
    pub token: String,
    pub note: Option<String>,
    pub enabled: bool,
    pub expiration_time: Option<NaiveDateTime>,
    pub creation_time: NaiveDateTime,
    pub last_edit_time: Option<NaiveDateTime>,
    pub last_usage_time: Option<NaiveDateTime>,
    pub version: i32,
}

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct CreateUserTokenHttpQuery {
    pub enabled: Option<bool>,
    pub note: Option<String>,
    pub expiration_time: Option<DateTime<Local>>,
}

// Note: the lines below define the default state for a newly created token.
// The problem is that on login the frontend does not send "expiration_time",
// so the value is taken from here. TODO: take the offset from the config instead of hardcoding it!
impl Default for CreateUserTokenHttpQuery {
    fn default() -> Self {
        Self {
            enabled: Some(true),
            note: Some("Undefined during creation".to_string()),
            expiration_time: Some(Local::now().checked_add_months(Months::new(12)).unwrap())
        }
    }
}

pub async fn create_usertoken( // POST
    Path(user): Path<String>,
    State(state): State<Arc<AppState>>,
    Json(params): Json<CreateUserTokenHttpQuery>, // NOTE: the Json extractor consumes the request body, so it MUST be the last argument (see the last line of the Json extractor docs)
) -> ApiResult<Json<UserTokenHttpResponse>> {
    debug!("Trying to create new user-token for '{user}' with params: {params:?}");
    let user = state.db.get_user_by_name(&user).await?;
    let form_data = user_token::ActiveModel {
        user_id: Set(user.id),
        token: Set(Uuid::new_v4().to_string()),
        // Clumsy fallback to the defaults
        note: Set(Some(params.note.unwrap_or(CreateUserTokenHttpQuery::default().note.unwrap()))),
        enabled: Set(params.enabled.unwrap_or(CreateUserTokenHttpQuery::default().enabled.unwrap())),
        // Equally clumsy fallback, now with the expiration time
        expiration_time: Set(Some(params.expiration_time.unwrap_or(CreateUserTokenHttpQuery::default().expiration_time.unwrap()).naive_utc())),
        ..Default::default()
    };
    debug!("{form_data:?}");
    state.db.create_user_token(form_data.clone()).await?;
    let raw_token = state.db.get_user_token(&form_data.token.unwrap()).await?;
    Ok(Json(UserTokenHttpResponse {
        user: MicroUser { name: user.name, avatar_url: "data/avatarka.jpg".to_string() }, // FIXME: hardcoded
        token: raw_token.token,
        note: raw_token.note,
        enabled: raw_token.enabled,
        expiration_time: raw_token.expiration_time,
        creation_time: raw_token.creation_time,
        last_edit_time: raw_token.last_edit_time,
        last_usage_time: raw_token.last_usage_time,
        version: raw_token.version,
    }))
}

pub async fn delete_usertoken(
    Path((user, token)): Path<(String, String)>,
    State(state): State<Arc<AppState>>,
) -> ApiResult<&'static str> {
    if state.db.get_user_by_name(&user).await?.id == state.db.get_user_token(&token).await?.user_id {
        state.db.delete_user_token(&token).await?;
        debug!("Token {token} deleted!")
    } else {
        return Err(ApiError::DeleteToken(crate::db::errors::DeleteUserTokenError::TokenUserIdDontMatch));
    }
    // Output
    Ok("{}")
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ListUserTokensHttpResponse {
    pub results: Vec<UserTokenHttpResponse>,
}

pub async fn list_usertokens( // GET
    Path(user): Path<String>,
    State(state): State<Arc<AppState>>,
) -> ApiResult<Json<ListUserTokensHttpResponse>> {
    let user = state.db.get_user_by_name(&user).await?;
    let raw_tokens = state.db.get_user_tokens_by_user_id(user.id as u64).await?;
    let miniuser = MicroUser {
        name: user.name,
        avatar_url: "data/avatarka.jpg".to_string(), // FIXME: hardcoded
    };
    let mut prepared_tokens: Vec<UserTokenHttpResponse> = Vec::new();
    for model in raw_tokens.iter() {
        prepared_tokens.push(UserTokenHttpResponse {
            user: miniuser.clone(),
            token: model.token.to_owned(),
            note: model.note.to_owned(),
            enabled: model.enabled,
            expiration_time: model.expiration_time,
            creation_time: model.creation_time,
            last_edit_time: model.last_edit_time,
            last_usage_time: model.last_usage_time,
            version: model.version,
        })
    }
    Ok(Json(ListUserTokensHttpResponse {
        results: prepared_tokens,
    }))
}
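
The "clumsy" fallback in `create_usertoken` rebuilds `CreateUserTokenHttpQuery::default()` once per field. A possible tidier variant, shown only as a sketch and not part of this commit, merges the request with the defaults in one place using `Option::or`:

```rust
// Hypothetical helper: fill missing fields from the Default impl in one pass.
impl CreateUserTokenHttpQuery {
    fn with_defaults(self) -> Self {
        let defaults = Self::default();
        Self {
            enabled: self.enabled.or(defaults.enabled),
            note: self.note.or(defaults.note),
            expiration_time: self.expiration_time.or(defaults.expiration_time),
        }
    }
}
```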

92 src/auth-dw.rs Normal file
@@ -0,0 +1,92 @@
use axum::async_trait;
use password_auth::verify_password;
use sea_orm::DatabaseConnection;
use serde::{Serialize, Deserialize};
use axum_login::{self, AuthUser, AuthnBackend, UserId};
use tokio::task;

//
// FOR DELETE! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! !
//

#[derive(Clone, Serialize, Deserialize)]
pub struct User {
    id: i32,
    pub name: String,
    password: String,
}

impl std::fmt::Debug for User {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("User")
            .field("id", &self.id)
            .field("username", &self.name)
            .field("password", &"[redacted]")
            .finish()
    }
}

impl AuthUser for User {
    type Id = i32;

    fn id(&self) -> Self::Id {
        self.id
    }

    fn session_auth_hash(&self) -> &[u8] {
        self.password.as_bytes()
    }
}

pub struct Credentials {
    pub username: String,
    pub password: String,
    pub next: Option<String>,
}

#[derive(Debug, Clone)]
pub struct Backend {
    db: DatabaseConnection,
}

impl Backend {
    pub fn new(db: DatabaseConnection) -> Self {
        Self { db }
    }
}

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    Database(#[from] sea_orm::DbErr),

    #[error(transparent)]
    TaskJoin(#[from] task::JoinError),
}

#[async_trait]
impl AuthnBackend for Backend {
    type User = User;
    type Credentials = Credentials;
    type Error = Error;

    async fn authenticate(
        &self,
        creds: Self::Credentials
    ) -> Result<Option<Self::User>, Self::Error> {
        let raw_user = service::UserQuery::find_user_credentials_by_name(&self.db, &creds.username).await?.unwrap();
        let user = Some(Self::User { id: raw_user.id, name: raw_user.name, password: raw_user.password_hash });
        task::spawn_blocking(|| {
            // We're using password-based authentication--this works by comparing our form
            // input with an argon2 password hash.
            Ok(user.filter(|user| verify_password(creds.password, &user.password).is_ok()))
        })
        .await?
    }

    async fn get_user(&self, user_id: &UserId<Self>) -> Result<Option<Self::User>, Self::Error> {
        let raw_user = service::UserQuery::find_user_credentials_by_id(&self.db, *user_id).await?.unwrap();
        let user = Some(Self::User { id: raw_user.id, name: raw_user.name, password: raw_user.password_hash });
        Ok(user)
    }
}
106
src/auth.rs
Normal file
@ -0,0 +1,106 @@
use axum::{
    extract::FromRequestParts,
    http::{header, StatusCode, request::Parts},
    async_trait,
};
use log::debug;
use data_encoding::BASE64;

#[derive(PartialEq, Debug)]
pub enum RequireAuth {
    Basic {
        name: String,
        password: String,
    },
    Token {
        name: String,
        token: String,
    },
    None,
}

impl Default for RequireAuth {
    fn default() -> Self {
        Self::None
    }
}

impl RequireAuth {
    pub fn is_some(&self) -> bool {
        !matches!(*self, RequireAuth::None)
    }
    pub fn is_none(&self) -> bool {
        matches!(*self, RequireAuth::None)
    }
}

// pub struct User {
//     pub id: i32,
//     pub name: String,
//     pub rank: UserRank,
// }

#[async_trait]
impl<S> FromRequestParts<S> for RequireAuth
where
    S: Send + Sync,
{
    type Rejection = StatusCode;

    async fn from_request_parts(parts: &mut Parts, _: &S) -> Result<Self, Self::Rejection> {
        let auth_header = parts
            .headers
            .get(header::AUTHORIZATION)
            .and_then(|value| value.to_str().ok());
        // let auth_cookie; TODO: Decide whether cookies should also be accepted for authorization
        debug!("!AUTH {auth_header:?}");

        if auth_header.is_none() {
            debug!("No Auth header: {auth_header:?}");
            return Ok(Self::default())
        }
        let (method, auth) = match auth_header.unwrap().split_once(' ') {
            Some((method, auth)) => (method, auth),
            None => ("", auth_header.unwrap()), // No scheme; treat the whole header value as the credentials.
        };

        match method {
            "Basic" => {
                let auth = String::from_utf8(BASE64.decode(auth.as_bytes()).unwrap()).unwrap();
                let (name, password) = auth.split_once(':').unwrap();
                debug!("!BASIC {name} {password}");
                Ok(Self::Basic { name: name.to_string(), password: password.to_string() })
            }
            "Token" => {
                let auth = String::from_utf8(BASE64.decode(auth.as_bytes()).unwrap()).unwrap();
                let (name, token) = auth.split_once(':').unwrap();
                debug!("!TOKEN {name} {token}");
                Ok(Self::Token { name: name.to_string(), token: token.to_string() })
            }
            _ => { // We end up here when the scheme is empty or unrecognized, e.g. malformed data.
                // Boom!
                debug!("Unrecognized auth scheme: {auth}");
                Ok(Self::default())
            }
        }

        // debug!("Auth: {auth_header:?}");
        // Ok(Self::default())
    }
}

#[cfg(test)]
mod tests {
    use crate::RequireAuth;

    #[test]
    fn is_some() {
        let x = RequireAuth::Basic { name: "".to_string(), password: "".to_string() };
        assert!(x.is_some())
    }
    #[test]
    fn is_none() {
        let x = RequireAuth::None;
        assert!(x.is_none())
    }
}
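// Illustrative sketch (not part of the commit): how a client would build the two
// Authorization header values the RequireAuth extractor above accepts. The
// "user"/"hunter2"/"token-uuid" values are made-up placeholders.
//
// use data_encoding::BASE64;
//
// fn basic_header(name: &str, password: &str) -> String {
//     format!("Basic {}", BASE64.encode(format!("{name}:{password}").as_bytes()))
// }
//
// fn token_header(name: &str, token: &str) -> String {
//     format!("Token {}", BASE64.encode(format!("{name}:{token}").as_bytes()))
// }
//
// // basic_header("user", "hunter2")    => "Basic dXNlcjpodW50ZXIy"
// // token_header("user", "token-uuid") => "Token dXNlcjp0b2tlbi11dWlk"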
251
src/config.rs
Normal file
@ -0,0 +1,251 @@
use std::{io::Read, path::PathBuf};

use serde::{Deserialize, Serialize};

use crate::UserRank;

#[derive(Deserialize, Clone, Debug)]
pub struct Config {
    pub name: String,
    pub domain: String,
    pub listen: String,
    pub secret: String,
    pub delete_source_files: bool,
    pub contact_email: String,
    pub enable_safety: bool,
    pub tag_name_regex: String,
    pub tag_category_name_regex: String,
    pub pool_name_regex: String,
    pub pool_category_name_regex: String,
    pub password_regex: String,
    pub user_name_regex: String,
    pub allow_broken_uploads: bool,
    pub webhooks: Option<Vec<String>>,
    pub default_rank: UserRank,
    pub thumbnails: Thumbnails,
    pub smtp: Smtp,
    pub privileges: Privileges,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct Thumbnails {
    pub avatar_width: u64,
    pub avatar_height: u64,
    pub post_width: u64,
    pub post_height: u64,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct Smtp {
    pub enabled: bool,
    pub host: String,
    pub port: u16,
    pub user: String,
    pub pass: String,
    pub from: String,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct Privileges {
    #[serde(rename = "users:create:self")]
    pub users_create_self: UserRank,
    #[serde(rename = "users:create:any")]
    pub users_create_any: UserRank,
    #[serde(rename = "users:list")]
    pub users_list: UserRank,
    #[serde(rename = "users:view")]
    pub users_view: UserRank,
    #[serde(rename = "users:edit:any:name")]
    pub users_edit_any_name: UserRank,
    #[serde(rename = "users:edit:any:pass")]
    pub users_edit_any_pass: UserRank,
    #[serde(rename = "users:edit:any:email")]
    pub users_edit_any_email: UserRank,
    #[serde(rename = "users:edit:any:avatar")]
    pub users_edit_any_avatar: UserRank,
    #[serde(rename = "users:edit:any:rank")]
    pub users_edit_any_rank: UserRank,
    #[serde(rename = "users:edit:self:name")]
    pub users_edit_self_name: UserRank,
    #[serde(rename = "users:edit:self:pass")]
    pub users_edit_self_pass: UserRank,
    #[serde(rename = "users:edit:self:email")]
    pub users_edit_self_email: UserRank,
    #[serde(rename = "users:edit:self:avatar")]
    pub users_edit_self_avatar: UserRank,
    #[serde(rename = "users:edit:self:rank")]
    pub users_edit_self_rank: UserRank,
    #[serde(rename = "users:delete:any")]
    pub users_delete_any: UserRank,
    #[serde(rename = "users:delete:self")]
    pub users_delete_self: UserRank,
    #[serde(rename = "userTokens:list:any")]
    pub user_tokens_list_any: UserRank,
    #[serde(rename = "userTokens:list:self")]
    pub user_tokens_list_self: UserRank,
    #[serde(rename = "userTokens:create:any")]
    pub user_tokens_create_any: UserRank,
    #[serde(rename = "userTokens:create:self")]
    pub user_tokens_create_self: UserRank,
    #[serde(rename = "userTokens:edit:any")]
    pub user_tokens_edit_any: UserRank,
    #[serde(rename = "userTokens:edit:self")]
    pub user_tokens_edit_self: UserRank,
    #[serde(rename = "userTokens:delete:any")]
    pub user_tokens_delete_any: UserRank,
    #[serde(rename = "userTokens:delete:self")]
    pub user_tokens_delete_self: UserRank,
    #[serde(rename = "posts:create:anonymous")]
    pub posts_create_anonymous: UserRank,
    #[serde(rename = "posts:create:identified")]
    pub posts_create_identified: UserRank,
    #[serde(rename = "posts:list")]
    pub posts_list: UserRank,
    #[serde(rename = "posts:reverseSearch")]
    pub posts_reverse_search: UserRank,
    #[serde(rename = "posts:view")]
    pub posts_view: UserRank,
    #[serde(rename = "posts:view:featured")]
    pub posts_view_featured: UserRank,
    #[serde(rename = "posts:edit:content")]
    pub posts_edit_content: UserRank,
    #[serde(rename = "posts:edit:flags")]
    pub posts_edit_flags: UserRank,
    #[serde(rename = "posts:edit:notes")]
    pub posts_edit_notes: UserRank,
    #[serde(rename = "posts:edit:relations")]
    pub posts_edit_relations: UserRank,
    #[serde(rename = "posts:edit:safety")]
    pub posts_edit_safety: UserRank,
    #[serde(rename = "posts:edit:source")]
    pub posts_edit_source: UserRank,
    #[serde(rename = "posts:edit:tags")]
    pub posts_edit_tags: UserRank,
    #[serde(rename = "posts:edit:thumbnail")]
    pub posts_edit_thumbnail: UserRank,
    #[serde(rename = "posts:feature")]
    pub posts_feature: UserRank,
    #[serde(rename = "posts:delete")]
    pub posts_delete: UserRank,
    #[serde(rename = "posts:score")]
    pub posts_score: UserRank,
    #[serde(rename = "posts:merge")]
    pub posts_merge: UserRank,
    #[serde(rename = "posts:favorite")]
    pub posts_favorite: UserRank,
    #[serde(rename = "posts:bulk-edit:tags")]
    pub posts_bulk_edit_tags: UserRank,
    #[serde(rename = "posts:bulk-edit:safety")]
    pub posts_bulk_edit_safety: UserRank,
    #[serde(rename = "posts:bulk-edit:delete")]
    pub posts_bulk_edit_delete: UserRank,
    #[serde(rename = "tags:create")]
    pub tags_create: UserRank,
    #[serde(rename = "tags:edit:names")]
    pub tags_edit_names: UserRank,
    #[serde(rename = "tags:edit:category")]
    pub tags_edit_category: UserRank,
    #[serde(rename = "tags:edit:description")]
    pub tags_edit_description: UserRank,
    #[serde(rename = "tags:edit:implications")]
    pub tags_edit_implications: UserRank,
    #[serde(rename = "tags:edit:suggestions")]
    pub tags_edit_suggestions: UserRank,
    #[serde(rename = "tags:list")]
    pub tags_list: UserRank,
    #[serde(rename = "tags:view")]
    pub tags_view: UserRank,
    #[serde(rename = "tags:merge")]
    pub tags_merge: UserRank,
    #[serde(rename = "tags:delete")]
    pub tags_delete: UserRank,
    #[serde(rename = "tagCategories:create")]
    pub tag_categories_create: UserRank,
    #[serde(rename = "tagCategories:edit:name")]
    pub tag_categories_edit_name: UserRank,
    #[serde(rename = "tagCategories:edit:color")]
    pub tag_categories_edit_color: UserRank,
    #[serde(rename = "tagCategories:edit:order")]
    pub tag_categories_edit_order: UserRank,
    #[serde(rename = "tagCategories:list")]
    pub tag_categories_list: UserRank,
    #[serde(rename = "tagCategories:view")]
    pub tag_categories_view: UserRank,
    #[serde(rename = "tagCategories:delete")]
    pub tag_categories_delete: UserRank,
    #[serde(rename = "tagCategories:setDefault")]
    pub tag_categories_set_default: UserRank,
    #[serde(rename = "pools:create")]
    pub pools_create: UserRank,
    #[serde(rename = "pools:edit:names")]
    pub pools_edit_names: UserRank,
    #[serde(rename = "pools:edit:category")]
    pub pools_edit_category: UserRank,
    #[serde(rename = "pools:edit:description")]
    pub pools_edit_description: UserRank,
    #[serde(rename = "pools:edit:posts")]
    pub pools_edit_posts: UserRank,
    #[serde(rename = "pools:list")]
    pub pools_list: UserRank,
    #[serde(rename = "pools:view")]
    pub pools_view: UserRank,
    #[serde(rename = "pools:merge")]
    pub pools_merge: UserRank,
    #[serde(rename = "pools:delete")]
    pub pools_delete: UserRank,
    #[serde(rename = "poolCategories:create")]
    pub pool_categories_create: UserRank,
    #[serde(rename = "poolCategories:edit:name")]
    pub pool_categories_edit_name: UserRank,
    #[serde(rename = "poolCategories:edit:color")]
    pub pool_categories_edit_color: UserRank,
    #[serde(rename = "poolCategories:list")]
    pub pool_categories_list: UserRank,
    #[serde(rename = "poolCategories:view")]
    pub pool_categories_view: UserRank,
    #[serde(rename = "poolCategories:delete")]
    pub pool_categories_delete: UserRank,
    #[serde(rename = "poolCategories:setDefault")]
    pub pool_categories_set_default: UserRank,
    #[serde(rename = "comments:create")]
    pub comments_create: UserRank,
    #[serde(rename = "comments:delete:any")]
    pub comments_delete_any: UserRank,
    #[serde(rename = "comments:delete:own")]
    pub comments_delete_own: UserRank,
    #[serde(rename = "comments:edit:any")]
    pub comments_edit_any: UserRank,
    #[serde(rename = "comments:edit:own")]
    pub comments_edit_own: UserRank,
    #[serde(rename = "comments:list")]
    pub comments_list: UserRank,
    #[serde(rename = "comments:view")]
    pub comments_view: UserRank,
    #[serde(rename = "comments:score")]
    pub comments_score: UserRank,
    #[serde(rename = "snapshots:list")]
    pub snapshots_list: UserRank,
    #[serde(rename = "uploads:create")]
    pub uploads_create: UserRank,
    #[serde(rename = "uploads:useDownloader")]
    pub uploads_use_downloader: UserRank,
}

impl Config {
    pub fn parse(path: PathBuf) -> Self {
        let mut file = std::fs::File::open(path).expect("Access denied or file doesn't exist!");
        let mut data = String::new();
        file.read_to_string(&mut data).unwrap();

        toml::from_str(&data).unwrap()
    }
}

// #[allow(dead_code)]
// impl Default for Config {
//     fn default() -> Self {
//         Self {
//             name: "axumbooru".to_string(),
//         }
//     }
// }
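// Illustrative sketch (not part of the commit): loading the config at startup the
// way main.rs does. The path is the real default; note that parse panics on a
// missing file or invalid TOML, as implied by the expect/unwrap calls above.
//
// use std::path::PathBuf;
//
// fn load_config() -> Config {
//     Config::parse(PathBuf::from("booruconfig.toml"))
// }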
99
src/data.rs
Normal file
@ -0,0 +1,99 @@
use std::{fmt::Write as _, fs, path::Path};
use anyhow::{anyhow, Ok, Result};
use axum::body::Bytes;
use dashmap::DashMap;
use log::{debug, info, warn};
use ring::digest;

#[derive(Debug)]
pub struct Data(DashMap<String, Upload>);

pub const DATA: &str = "./data";
pub const AVATARS: &str = "./data/avatars";
pub const POSTS: &str = "./data/posts";
pub const TEMP: &str = "./data/temporary-uploads";
pub const THUMBNAILS: &str = "./data/generated-thumbnails";

#[derive(Debug, Clone)]
pub struct Upload {
    pub filename: String,
    pub content_type: String,
}

impl Data {
    pub fn new() -> Self {
        Self::default()
    }
    // Working with the data dir
    fn check_and_repair_directory(path: &str) -> Result<()> {
        if !Path::new(path).is_dir() {
            warn!("{} not found", path);
            fs::create_dir(path)?;
            info!("{} created!", path);
        }
        Ok(())
    }
    pub fn repair_data() -> Result<()> {
        debug!("Data Storage repair started!");
        Data::check_and_repair_directory(DATA)?;
        Data::check_and_repair_directory(AVATARS)?;
        Data::check_and_repair_directory(POSTS)?;
        Data::check_and_repair_directory(TEMP)?;
        Data::check_and_repair_directory(THUMBNAILS)?;
        debug!("Data Storage repair complete!");
        Ok(())
    }
    pub fn flush_temporary_uploads() -> Result<()> {
        debug!("Flush started!");
        for file in fs::read_dir("./data/temporary-uploads")? {
            let file = file?;
            debug!("Removing {:?}", &file.file_name());
            fs::remove_file(file.path())?;
        }
        debug!("Flushing complete!");
        Ok(())
    }
    // Implementing Self
    pub fn vec(&self) -> Vec<(String, Upload)> {
        self.0.clone().into_iter().collect()
    }
    pub fn get_and_remove(&self, token: &str) -> Option<Upload> {
        Some(self.0.remove(token)?.to_owned().1)
    }
    pub fn is_existing(&self, token: &str) -> bool {
        self.0.contains_key(token)
    }
    pub fn add(&self, extension: &str, content_type: &str, raw: Bytes) -> Result<String> {
        let binding = digest::digest(&digest::SHA1_FOR_LEGACY_USE_ONLY, &raw);
        let hash = binding.as_ref();
        // let hash = String::from_utf8(hash.to_vec())?;
        let mut token = String::new();
        for byte in hash {
            write!(token, "{:02x}", byte).unwrap();
        }
        if self.is_existing(&token) {
            return Err(anyhow!("{token} already exists!"));
        };
        let filename = format!("{}.{}", token, extension);
        fs::write(format!("./data/temporary-uploads/{filename}"), raw)?;
        self.0.insert(token.clone(), Upload { filename, content_type: content_type.to_owned() });
        Ok(token)
    }
}

impl Default for Data {
    fn default() -> Self {
        Self(DashMap::new())
    }
}

/*

Working with temporary files:
- Clear the directory
- Get the list as a Vec
- Check whether a token exists
- Add a new file to the list and return its content token
- Get the Upload struct by token

*/
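// Illustrative sketch (not part of the commit) of the temporary-upload flow
// described above; the raw bytes and extension are made-up placeholders.
//
// use axum::body::Bytes;
//
// fn example_upload_flow(uploads: &Data) -> anyhow::Result<()> {
//     let raw = Bytes::from_static(b"fake image bytes");
//     // `add` hashes the bytes (SHA-1, hex-encoded), writes them into
//     // ./data/temporary-uploads/<token>.<ext> and returns the token.
//     let token = uploads.add("png", "image/png", raw)?;
//     assert!(uploads.is_existing(&token));
//     // Later, a post-creation handler would claim the upload by token.
//     let upload = uploads.get_and_remove(&token).expect("just inserted");
//     assert_eq!(upload.content_type, "image/png");
//     Ok(())
// }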
32
src/db/errors.rs
Normal file
@ -0,0 +1,32 @@
#[derive(thiserror::Error, Debug)]
#[error("Something went wrong.")]
pub struct DatabaseError {
    #[from]
    source: anyhow::Error,
}

#[derive(thiserror::Error, Debug)]
pub enum GetUserError {
    #[error("There is no user with {name:?} as name.")]
    UserNotFound {
        name: String,
        #[source]
        source: DatabaseError,
    },
    #[error("Something went wrong.")]
    DatabaseError(#[from] DatabaseError),
}

#[derive(thiserror::Error, Debug)]
pub enum DeleteUserTokenError {
    #[error("There is no user_token with {token:?} as token.")]
    TokenNotFound {
        token: String,
        #[source]
        source: DatabaseError,
    },
    #[error("Token user_id and user don't match!")]
    TokenUserIdDontMatch,
    #[error("Something went wrong.")]
    DatabaseError(#[from] DatabaseError),
}
3
src/db/mod.rs
Normal file
@ -0,0 +1,3 @@
pub mod schemas;
pub mod repository;
pub mod errors;
280
src/db/repository.rs
Normal file
@ -0,0 +1,280 @@
use sea_orm::*;
use chrono::Local;

use crate::db::schemas::{
    prelude::*,
    user, user_token, post, snapshot,
};
use super::errors::*;

pub fn to_db_error(e: sea_orm::DbErr) -> DatabaseError {
    DatabaseError::from(anyhow::Error::from(e))
}

#[derive(Debug, Clone)]
pub struct Repository(DatabaseConnection);

impl Repository {
    // Creating the structure
    pub async fn create(url: String) -> anyhow::Result<Repository> {
        let pool = Database::connect(url).await?;
        Ok(Repository(pool))
    }
    pub fn with_connection(pool: DatabaseConnection) -> Repository {
        Repository(pool)
    }

    pub fn pool(&self) -> DatabaseConnection {
        self.0.clone()
    }
    // User
    pub async fn get_users_count(&self) -> Result<u64, DatabaseError> {
        User::find().count(&self.0).await.map_err(to_db_error)
    }
    pub async fn get_users_in_page(&self, page: u64, per_page: u64) -> Result<(Vec<user::Model>, u64), DatabaseError> {
        // Set up the paginator
        let paginator = User::find()
            .order_by_asc(user::Column::Id)
            .paginate(&self.0, per_page);
        let num_pages = paginator.num_pages().await.map_err(to_db_error)?;
        // Fetch the requested page of users
        paginator.fetch_page(page - 1).await.map_err(to_db_error).map(|p| (p, num_pages))
    }
    pub async fn get_user_by_id(&self, id: u64) -> Result<user::Model, GetUserError> {
        Ok(User::find_by_id(id as i32).one(&self.0).await.map_err(to_db_error)?.ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("User not found"))})?)
    }
    pub async fn get_user_by_name(&self, name: &str) -> Result<user::Model, GetUserError> {
        let user = User::find()
            .filter(user::Column::Name.contains(name))
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("User not found"))})?;
        Ok(user)
    }
    pub async fn create_user(&self, user: user::ActiveModel) -> Result<user::ActiveModel, DatabaseError> {
        user::ActiveModel {
            creation_time: Set(Local::now().naive_local().to_owned()),
            version: Set(1),
            password_revision: Set(3),
            ..user
        }
        .save(&self.0)
        .await.map_err(to_db_error)
    }
    pub async fn update_user(&self, id: u64, user: user::ActiveModel) -> Result<user::Model, DatabaseError> {
        let current_user: user::ActiveModel = User::find_by_id(id as i32)
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("User not found"))})
            .map(Into::into)?;
        let user = user.try_into_model().expect("Can't convert into model");
        user::ActiveModel {
            id: current_user.id,
            name: current_user.name,
            password_hash: Set(user.password_hash.to_owned()),
            password_salt: Set(user.password_salt.to_owned()),
            email: Set(user.email.to_owned()),
            rank: Set(user.rank.to_owned()),
            creation_time: current_user.creation_time,
            last_login_time: current_user.last_login_time,
            avatar_style: Set(user.avatar_style.to_owned()),
            version: Set(user.version.to_owned()),
            password_revision: Set(user.password_revision.to_owned()),
        }
        .update(&self.0)
        .await.map_err(to_db_error)
    }
    pub async fn delete_user(&self, id: u64) -> Result<(), DatabaseError> {
        let user: user::ActiveModel = User::find_by_id(id as i32)
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("User not found"))})
            .map(Into::into)?;

        user.delete(&self.0).await.map_err(to_db_error)?;
        Ok(())
    }
    pub async fn update_last_login_time(&self, name: &str) -> Result<user::Model, DatabaseError> {
        let mut current_user: user::ActiveModel = User::find()
            .filter(user::Column::Name.contains(name))
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("User not found"))})
            .map(Into::into)?;
        current_user.last_login_time = Set(Some(Local::now().naive_local().to_owned()));
        current_user.update(&self.0).await.map_err(to_db_error)
    }
    // Post
    pub async fn get_posts_count(&self) -> Result<u64, DatabaseError> {
        Post::find().count(&self.0).await.map_err(to_db_error)
    }
    pub async fn get_posts_in_page(&self, page: u64, per_page: u64) -> Result<(Vec<post::Model>, u64), DatabaseError> {
        // Set up the paginator
        let paginator = Post::find()
            .order_by_asc(post::Column::Id)
            .paginate(&self.0, per_page);
        let num_pages = paginator.num_pages().await.map_err(to_db_error)?;
        // Fetch the requested page of posts
        paginator.fetch_page(page).await.map_err(to_db_error).map(|p| (p, num_pages))
    }
    pub async fn get_post_by_id(&self, id: u64) -> Result<post::Model, DatabaseError> {
        Ok(Post::find_by_id(id as i32).one(&self.0).await.map_err(to_db_error)?.ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("Post not found"))})?)
    }
    pub async fn create_post(&self, post: post::ActiveModel) -> Result<post::ActiveModel, DatabaseError> {
        post::ActiveModel {
            creation_time: Set(Local::now().naive_local().to_owned()),
            ..post
        }
        .save(&self.0)
        .await.map_err(to_db_error)
    }
    pub async fn update_post(&self, id: u64, post: post::ActiveModel) -> Result<post::Model, DatabaseError> {
        let posts: post::ActiveModel = Post::find_by_id(id as i32)
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("Post not found"))})
            .map(Into::into)?;
        let post = post.try_into_model().expect("Can't convert into model");
        post::ActiveModel {
            id: posts.id,
            user_id: posts.user_id,
            creation_time: posts.creation_time,
            last_edit_time: Set(Some(Local::now().naive_local().to_owned())),
            safety: Set(post.safety.to_owned()),
            r#type: Set(post.r#type.to_owned()),
            checksum: Set(post.checksum.to_owned()),
            source: posts.source,
            file_size: posts.file_size,
            image_width: posts.image_width,
            image_height: posts.image_height,
            mime_type: Set(post.mime_type.to_owned()),
            version: Set(post.version.to_owned()),
            flags: posts.flags,
            checksum_md5: posts.checksum_md5,
        }
        .update(&self.0)
        .await.map_err(to_db_error)
    }
    pub async fn delete_post(&self, id: u64) -> Result<(), DatabaseError> {
        let post: post::ActiveModel = Post::find_by_id(id as i32)
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("Post not found"))})
            .map(Into::into)?;
        post.delete(&self.0).await.map_err(to_db_error)?;
        Ok(())
    }
    // User Token
    pub async fn get_user_tokens_count(&self) -> Result<u64, DatabaseError> {
        UserToken::find().count(&self.0).await.map_err(to_db_error)
    }
    pub async fn get_user_tokens_in_page(&self, page: u64, per_page: u64) -> Result<(Vec<user_token::Model>, u64), DatabaseError> {
        // Set up the paginator
        let paginator = UserToken::find()
            .order_by_asc(user_token::Column::Id)
            .paginate(&self.0, per_page);
        let num_pages = paginator.num_pages().await.map_err(to_db_error)?;
        // Fetch the requested page of tokens
        paginator.fetch_page(page).await.map_err(to_db_error).map(|p| (p, num_pages))
    }
    pub async fn get_user_tokens_by_user_id(&self, user_id: u64) -> Result<Vec<user_token::Model>, DatabaseError> {
        UserToken::find()
            .filter(user_token::Column::UserId.eq(user_id))
            .order_by_asc(user_token::Column::Id)
            .all(&self.0)
            .await.map_err(to_db_error)
    }
    pub async fn get_user_token_by_id(&self, id: u64) -> Result<user_token::Model, DatabaseError> {
        Ok(UserToken::find_by_id(id as i32).one(&self.0).await.map_err(to_db_error)?.ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("UserToken not found"))})?)
    }
    pub async fn get_user_token(&self, token: &str) -> Result<user_token::Model, DatabaseError> {
        Ok(UserToken::find().filter(user_token::Column::Token.contains(token)).one(&self.0).await.map_err(to_db_error)?.ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("UserToken not found"))})?)
    }
    pub async fn create_user_token(&self, user_token: user_token::ActiveModel) -> Result<user_token::ActiveModel, DatabaseError> {
        user_token::ActiveModel {
            creation_time: Set(Local::now().naive_local().to_owned()),
            last_edit_time: Set(None),
            last_usage_time: Set(None),
            version: Set(1),
            ..user_token
        }
        .save(&self.0)
        .await.map_err(to_db_error)
    }
    pub async fn update_user_token(&self, id: u64, user_token: user_token::ActiveModel) -> Result<user_token::Model, DatabaseError> {
        let current_user_token: user_token::ActiveModel = UserToken::find_by_id(id as i32)
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("UserToken not found"))})
            .map(Into::into)?;
        let user_token = user_token.try_into_model().expect("Can't convert into model");
        user_token::ActiveModel {
            id: current_user_token.id,
            user_id: current_user_token.user_id,
            token: current_user_token.token,
            note: Set(user_token.note.to_owned()), // Can be updated
            enabled: Set(user_token.enabled.to_owned()), // Can be updated
            expiration_time: Set(user_token.expiration_time.to_owned()), // Can be updated
            creation_time: current_user_token.creation_time,
            last_edit_time: Set(Some(Local::now().naive_local().to_owned())),
            last_usage_time: current_user_token.last_usage_time,
            version: Set(user_token.version.to_owned()), // Can be updated
        }
        .update(&self.0)
        .await.map_err(to_db_error)
    }
    pub async fn delete_user_token(&self, token: &str) -> Result<(), DeleteUserTokenError> {
        let user_token: user_token::ActiveModel = UserToken::find()
            .filter(user_token::Column::Token.contains(token))
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("UserToken not found"))})
            .map(Into::into)?;

        user_token.delete(&self.0).await.map_err(to_db_error)?;
        Ok(())
    }
    pub async fn update_last_token_usage_time(&self, name: &str) -> Result<user_token::Model, DatabaseError> {
        let mut user_token: user_token::ActiveModel = UserToken::find()
            .filter(user_token::Column::Token.contains(name))
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("UserToken not found"))})
            .map(Into::into)?;

        user_token.last_usage_time = Set(Some(Local::now().naive_local().to_owned()));

        user_token.update(&self.0).await.map_err(to_db_error)
    }
    // Snapshot
    pub async fn get_snapshots_count(&self) -> Result<u64, DatabaseError> {
        Snapshot::find().count(&self.0).await.map_err(to_db_error)
    }
    pub async fn get_snapshots_in_page(&self, page: u64, per_page: u64) -> Result<(Vec<snapshot::Model>, u64), DatabaseError> {
        // Set up the paginator
        let paginator = Snapshot::find()
            .order_by_asc(snapshot::Column::Id)
            .paginate(&self.0, per_page);
        let num_pages = paginator.num_pages().await.map_err(to_db_error)?;
        // Fetch the requested page of snapshots
        paginator.fetch_page(page - 1).await.map_err(to_db_error).map(|p| (p, num_pages))
    }
    pub async fn create_snapshot(&self, snapshot: snapshot::ActiveModel) -> Result<snapshot::ActiveModel, DatabaseError> {
        snapshot::ActiveModel {
            creation_time: Set(Local::now().naive_local().to_owned()),
            ..snapshot
        }
        .save(&self.0)
        .await.map_err(to_db_error)
    }
    pub async fn delete_snapshot(&self, id: u64) -> Result<(), DatabaseError> {
        let snapshot: snapshot::ActiveModel = Snapshot::find_by_id(id as i32)
            .one(&self.0)
            .await.map_err(to_db_error)?
            .ok_or_else(|| {DatabaseError::from(anyhow::anyhow!("Snapshot not found"))})
            .map(Into::into)?;

        snapshot.delete(&self.0).await.map_err(to_db_error)?;
        Ok(())
    }
}
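// Illustrative sketch (not part of the commit): how a handler might drive the
// pagination helpers above. The page numbers are placeholders; note that
// get_users_in_page treats `page` as 1-based (it fetches `page - 1`), while
// get_posts_in_page passes `page` through 0-based.
//
// async fn example_listing(repo: &Repository) -> Result<(), DatabaseError> {
//     let total_users = repo.get_users_count().await?;
//     let (first_page, num_pages) = repo.get_users_in_page(1, 20).await?;
//     log::debug!("{total_users} users over {num_pages} pages, first page has {}", first_page.len());
//     Ok(())
// }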
9
src/db/schemas/mod.rs
Normal file
@ -0,0 +1,9 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.14

pub mod prelude;
pub mod prelude_model;

pub mod post;
pub mod snapshot;
pub mod user;
pub mod user_token;
45
src/db/schemas/post.rs
Normal file
@ -0,0 +1,45 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.14

use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "post")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub user_id: Option<i32>,
    pub creation_time: DateTime,
    pub last_edit_time: Option<DateTime>,
    pub safety: String,
    pub r#type: String,
    pub checksum: String,
    pub source: Option<String>,
    pub file_size: Option<i64>,
    pub image_width: Option<i32>,
    pub image_height: Option<i32>,
    #[sea_orm(column_name = "mime-type")]
    pub mime_type: String,
    pub version: i32,
    pub flags: Option<String>,
    pub checksum_md5: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id",
        on_update = "NoAction",
        on_delete = "SetNull"
    )]
    User,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
6
src/db/schemas/prelude.rs
Normal file
@ -0,0 +1,6 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.14

pub use super::post::Entity as Post;
pub use super::snapshot::Entity as Snapshot;
pub use super::user::Entity as User;
pub use super::user_token::Entity as UserToken;
4
src/db/schemas/prelude_model.rs
Normal file
@ -0,0 +1,4 @@
pub use super::post::Model as Post;
pub use super::snapshot::Model as Snapshot;
pub use super::user::Model as User;
pub use super::user_token::Model as UserToken;
38
src/db/schemas/snapshot.rs
Normal file
@ -0,0 +1,38 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.14

use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "snapshot")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub creation_time: DateTime,
    pub resource_type: String,
    pub operation: String,
    pub user_id: Option<i32>,
    #[sea_orm(column_type = "Binary(BlobSize::Blob(None))", nullable)]
    pub data: Option<Vec<u8>>,
    pub resource_name: String,
    pub resource_pkey: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id",
        on_update = "NoAction",
        on_delete = "SetNull"
    )]
    User,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
51
src/db/schemas/user.rs
Normal file
@ -0,0 +1,51 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.14

use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "user")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    #[sea_orm(unique)]
    pub name: String,
    pub password_hash: String,
    pub password_salt: Option<String>,
    pub email: Option<String>,
    pub rank: String,
    pub creation_time: DateTime,
    pub last_login_time: Option<DateTime>,
    pub avatar_style: String,
    pub version: i32,
    pub password_revision: i16,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::post::Entity")]
    Post,
    #[sea_orm(has_many = "super::snapshot::Entity")]
    Snapshot,
    #[sea_orm(has_many = "super::user_token::Entity")]
    UserToken,
}

impl Related<super::post::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Post.def()
    }
}

impl Related<super::snapshot::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Snapshot.def()
    }
}

impl Related<super::user_token::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::UserToken.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
39
src/db/schemas/user_token.rs
Normal file
@ -0,0 +1,39 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.14

use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "user_token")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub user_id: i32,
    pub token: String,
    pub note: Option<String>,
    pub enabled: bool,
    pub expiration_time: Option<DateTime>,
    pub creation_time: DateTime,
    pub last_edit_time: Option<DateTime>,
    pub last_usage_time: Option<DateTime>,
    pub version: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id",
        on_update = "NoAction",
        on_delete = "Cascade"
    )]
    User,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
64
src/error.rs
Normal file
@ -0,0 +1,64 @@
use axum::{http::StatusCode, response::{IntoResponse, Response}};
use log::error;
use serde_json::json;

use crate::db::errors::{DatabaseError, DeleteUserTokenError, GetUserError};

pub type ApiResult<T> = Result<T, ApiError>;

#[derive(thiserror::Error, Debug)]
pub enum ApiError {
    #[error(transparent)]
    Test(#[from] TestError),
    #[error(transparent)]
    Database(#[from] DatabaseError),
    #[error(transparent)]
    GetUser(#[from] GetUserError),
    #[error(transparent)]
    DeleteToken(#[from] DeleteUserTokenError),
    #[error("Something went wrong!")]
    Uploads,
}

#[derive(thiserror::Error, Debug)]
pub enum TestError {
    #[error("Its Just For Test")]
    ItsJustForTest, // TODO: development-only, remove later
    #[error("Second error what can be")]
    SecondEntry, // TODO: this one too!
}

impl IntoResponse for ApiError {
    fn into_response(self) -> Response {
        error!("Error on request: {self}");
        let description = self.to_string();
        match self {
            ApiError::Test(TestError::ItsJustForTest) => internal_server_error("InternalError", "ItsJustForTest", &description),
            ApiError::Test(TestError::SecondEntry) => internal_server_error("InternalError", "SecondEntry", &description),
            ApiError::Database(_) => internal_server_error("InternalError", &description, &description),
            ApiError::GetUser(_) => internal_server_error("InternalError", &description, &description),
            ApiError::DeleteToken(DeleteUserTokenError::DatabaseError(_)) => internal_server_error("InternalError", &description, &description),
            ApiError::DeleteToken(DeleteUserTokenError::TokenNotFound { .. }) => internal_server_error("InternalError", &description, &description),
            ApiError::DeleteToken(DeleteUserTokenError::TokenUserIdDontMatch) => method_not_allowed(),
            ApiError::Uploads => method_not_allowed(),
        }
    }
}

fn internal_server_error(name: &str, title: &str, description: &str) -> Response {
    (
        StatusCode::INTERNAL_SERVER_ERROR,
        [("Content-Type", "application/json")],
        json!({
            "name": name,
            "title": title,
            "description": description,
        }).to_string(),
    ).into_response()
}

fn method_not_allowed() -> Response {
    (
        StatusCode::METHOD_NOT_ALLOWED,
    ).into_response()
}
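// Illustrative note (not part of the commit): the JSON body produced by
// internal_server_error above, e.g. for ApiError::Database(_), where name, title
// and description come from the arguments passed in the match arms.
//
// {"name":"InternalError","title":"Something went wrong.","description":"Something went wrong."}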
1
src/func/mod.rs
Normal file
@ -0,0 +1 @@
pub mod post;
26
src/func/post.rs
Normal file
@ -0,0 +1,26 @@
use std::fmt::Display;
use md5::Md5;
use hmac::{Hmac, Mac};

type HmacMd5 = Hmac<Md5>;

pub fn get_post_security_hash<T: ToString>(id: T, key: &str) -> String {
    use std::fmt::Write;
    let mut mac = HmacMd5::new_from_slice(key.as_bytes()).expect("Something wrong with HMAC key!");
    mac.update(id.to_string().as_bytes());
    let code = &mac.finalize().into_bytes()[0 .. 8]; // 8 bytes hex-encode to the 16 characters we need
    let mut result = String::new();
    for byte in code {
        write!(result, "{:02x}", byte).unwrap();
    }
    result
}

pub fn get_post_content_path<T: Display>(id: T, hash: String, mime: &str) -> String {
    let extension = mime_guess2::get_mime_extensions_str(mime).expect("Unknown mime type!")[0];
    format!("data/posts/{id}_{hash}.{extension}").to_string()
}

pub fn get_post_thumbnail_path<T: Display>(id: T, hash: String) -> String {
    format!("data/generated-thumbnails/{id}_{hash}.jpg").to_string()
}
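// Illustrative sketch (not part of the commit): combining the helpers above to
// build a post's on-disk paths. The id, key and mime type are placeholders
// (the key would plausibly be Config::secret, but that wiring isn't shown here).
//
// fn example_paths() {
//     let key = "change-me";
//     let hash = get_post_security_hash(1, key); // 16 hex chars (8 HMAC-MD5 bytes)
//     let content = get_post_content_path(1, hash.clone(), "image/png"); // data/posts/1_<hash>.png
//     let thumb = get_post_thumbnail_path(1, hash);                      // data/generated-thumbnails/1_<hash>.jpg
//     println!("{content}\n{thumb}");
// }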
306
src/main.rs
Normal file
@ -0,0 +1,306 @@
use axum::{
    extract::DefaultBodyLimit, middleware::from_extractor, routing::{delete, get, post}, Router
};
use dotenvy::dotenv;
use serde::{Deserialize, Serialize};
use tower_http::trace::TraceLayer;
use std::{path::PathBuf, str::FromStr, sync::{Arc, Mutex}};
use log::{debug, error, info, trace};

// Configuration
pub mod config;
pub use config::Config;

// API
pub mod api;

// Functions
pub mod func;

// Error
pub mod error;

// Authentication
pub mod auth;
pub use auth::RequireAuth;

// Database
pub mod db;
use db::repository::Repository;

// Image Storage
pub mod data;
use data::Data;

#[derive(Debug)]
pub struct AppState {
    db: Repository,
    config: Config,
    uploads: Mutex<Data>,
}

#[tokio::main]
async fn main() {
    dotenv().ok();
    env_logger::init();

    std::panic::set_hook(Box::new(|x| {
        error!("{x}")
    }));

    debug!("Current dir: {:?}", std::env::current_dir());

    let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");

    Data::repair_data().unwrap();
    Data::flush_temporary_uploads().unwrap();

    // set up connection pool
    // let mut opt = ConnectOptions::new(db_url);
    // opt.sqlx_logging(true)
    //     .sqlx_logging_level(log::LevelFilter::Trace);

    // let state = Arc::new(AppState {
    //     db: Database::connect(opt)
    //         .await
    //         .expect("Database connection error!"),
    //     config: Config::parse(PathBuf::from_str("booruconfig.toml").unwrap()),
    //     uploads: Mutex::new(HashMap::new()),
    // });
    let state = Arc::new(AppState {
        db: Repository::create(db_url)
            .await
            .expect("Database connection error!"),
        config: Config::parse(PathBuf::from_str("booruconfig.toml").unwrap()),
        uploads: Mutex::new(Data::new()),
    });

    let listen = state.config.listen.clone();

    debug!("State ready!");
    trace!("Data:\n{:?}", state);

    let app = Router::new()
        .route("/test", get(api::test::test))
        .route("/test1", get(api::test::newtest))
        .route("/test2", get(api::test::newtest2))
        // TODO: Remove the test routes above
        .route("/posts/", get(api::post::list_of_posts))
        .route("/posts/reverse-search", post(api::post::reverse_post_search))
        .route("/post/:id", get(api::post::get_post_by_id))
        .route("/user/:user", get(api::user::get_user))
        .route("/user-tokens/:user", get(api::usertoken::list_usertokens))
        .route("/user-token/:user", post(api::usertoken::create_usertoken))
        .route("/user-token/:user/:token", delete(api::usertoken::delete_usertoken))
        .route("/users", post(api::user::create_user))
        .route("/uploads", post(api::data::upload).layer(DefaultBodyLimit::max(1073741824))) // 1 GB
        // TODO: Take the maximum allowed upload size from the config
        .route_layer(from_extractor::<RequireAuth>()) // Auth; the routes below don't require it.
        .route("/info", get(api::info::server_info))
        .fallback_service(api::data::data_static())
        .with_state(state)
        .layer(TraceLayer::new_for_http());

    let listener = tokio::net::TcpListener::bind(listen)
        .await
        .unwrap();
    info!("Listening on {}", listener.local_addr().unwrap());
    axum::serve(listener, app)
        .with_graceful_shutdown(shutdown_signal())
        .await
        .unwrap();
    info!("Serve stopped. Closing...");
}

// async fn handler() -> Result<(), AppError> {
//     try_thing()?;
//     Ok(())
// }
// fn try_thing() -> Result<(), anyhow::Error> {
//     anyhow::bail!("epic fail!")
// }

async fn shutdown_signal() {
    let ctrl_c = async {
        tokio::signal::ctrl_c()
            .await
            .expect("failed to install Ctrl+C handler");
    };
    #[cfg(unix)]
    let terminate = async {
        tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
            .expect("failed to install signal handler")
            .recv()
            .await;
    };
    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();
    tokio::select! {
        () = ctrl_c => {},
        () = terminate => {},
    }
    info!("Terminate signal received");
}

#[derive(Deserialize, Serialize, Debug, Clone)]
pub enum UserRank {
    #[serde(rename = "administrator")]
    Administrator,
    #[serde(rename = "moderator")]
    Moderator,
    #[serde(rename = "power")]
    Power,
    #[serde(rename = "regular")]
    Regular,
    #[serde(rename = "restricted")]
    Restricted,
    #[serde(rename = "anonymous")]
    Anonymous,
    #[serde(rename = "nobody")]
    Nobody,
}

impl FromStr for UserRank {
    fn from_str(str: &str) -> std::result::Result<Self, Self::Err> {
        match str {
            "administrator" => Ok(Self::Administrator),
            "moderator" => Ok(Self::Moderator),
            "power" => Ok(Self::Power),
            "regular" => Ok(Self::Regular),
            "restricted" => Ok(Self::Restricted),
            "anonymous" => Ok(Self::Anonymous),
            "nobody" => Ok(Self::Nobody),
            _ => Err(()),
        }
    }

    type Err = ();
}

impl ToString for UserRank {
    fn to_string(&self) -> String {
        match self {
            UserRank::Administrator => String::from("administrator"),
            UserRank::Moderator => String::from("moderator"),
            UserRank::Power => String::from("power"),
            UserRank::Regular => String::from("regular"),
            UserRank::Restricted => String::from("restricted"),
            UserRank::Anonymous => String::from("anonymous"),
            UserRank::Nobody => String::from("nobody"),
        }
    }
}

#[derive(Deserialize, Serialize, Debug, Clone)]
pub enum AvatarStyle {
    #[serde(rename = "gravatar")]
    Gravatar,
    #[serde(rename = "manual")]
    Manual,
}

impl FromStr for AvatarStyle {
    fn from_str(str: &str) -> std::result::Result<Self, Self::Err> {
        match str {
            "gravatar" => Ok(Self::Gravatar),
            "manual" => Ok(Self::Manual),
            _ => Err(()),
        }
    }

    type Err = ();
}

impl ToString for AvatarStyle {
    fn to_string(&self) -> String {
        match self {
            AvatarStyle::Gravatar => String::from("gravatar"),
            AvatarStyle::Manual => String::from("manual"),
        }
    }
}

// #[allow(dead_code)]
// #[derive(Serialize, Debug, thiserror::Error)]
// enum ApiErrorTypes {
//     MissingRequiredFileError,
//     MissingRequiredParameterError,
//     InvalidParameterError,
//     IntegrityError,
//     SearchError,
//     AuthError,
//     PostNotFoundError,
//     PostAlreadyFeaturedError,
//     PostAlreadyUploadedError,
//     InvalidPostIdError,
//     InvalidPostSafetyError,
//     InvalidPostSourceError,
//     InvalidPostContentError,
//     InvalidPostRelationError,
//     InvalidPostNoteError,
//     InvalidPostFlagError,
//     InvalidFavoriteTargetError,
//     InvalidCommentIdError,
//     CommentNotFoundError,
//     EmptyCommentTextError,
//     InvalidScoreTargetError,
//     InvalidScoreValueError,
//     TagCategoryNotFoundError,
//     TagCategoryAlreadyExistsError,
//     TagCategoryIsInUseError,
//     InvalidTagCategoryNameError,
//     InvalidTagCategoryColorError,
//     TagNotFoundError,
//     TagAlreadyExistsError,
//     TagIsInUseError,
//     InvalidTagNameError,
//     InvalidTagRelationError,
//     InvalidTagCategoryError,
//     InvalidTagDescriptionError,
//     UserNotFoundError,
//     UserAlreadyExistsError,
//     InvalidUserNameError,
//     InvalidEmailError,
//     InvalidPasswordError,
//     InvalidRankError,
//     InvalidAvatarError,
//     ProcessingError,
//     ValidationError,
// }
//
// #[derive(Serialize)]
// struct ApiErrorResponse {
//     name: String,
//     title: String,
//     description: String,
// }
//
// // Make our own error that wraps `anyhow::Error`.
// struct AppError(anyhow::Error);
//
// // Tell axum how to convert `AppError` into a response.
// impl IntoResponse for AppError {
//     fn into_response(self) -> Response {
//         (
//             StatusCode::INTERNAL_SERVER_ERROR,
//             [("Content-Type", "application/json")],
//             serde_json::to_string_pretty(&ApiErrorResponse {
//                 name: "ValidationError".to_string(),
//                 title: self.0.to_string(),
//                 description: "Error".to_string(),
//             })
//             .unwrap(),
//         ).into_response()
//     }
// }
//
// // This enables using `?` on functions that return `Result<_, anyhow::Error>` to turn them into
// // `Result<_, AppError>`. That way you don't need to do that manually.
// impl<E> From<E> for AppError
// where
//     E: Into<anyhow::Error>,
// {
//     fn from(err: E) -> Self {
//         Self(err.into())
//     }
// }
39
tests/service_tests.rs
Normal file
@ -0,0 +1,39 @@
// use dotenvy::dotenv;
// use sea_orm::{ConnectOptions, Database};
// use service::PostQuery;

// #[tokio::test]
// async fn query_with_filter() {
//     dotenv().ok();
//     // tracing_subscriber::fmt::init();

//     let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
//     let opt = ConnectOptions::new(db_url);
//     let db = Database::connect(opt).await.expect("Database connection error!");

//     let fields_mas = [true, false, false, false, true, true, false, false, false, false, false, false, true, false, false];

//     let (results_raw, _) = PostQuery::find_posts_in_page_with_filter(&db, 0, fields_mas, 42)
//         .await
//         .unwrap();

//     println!("{results_raw:?}")
// }

// #[tokio::test]
// async fn query_without_filter() {
//     dotenv().ok();
//     // tracing_subscriber::fmt::init();

//     let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
//     let opt = ConnectOptions::new(db_url);
//     let db = Database::connect(opt).await.expect("Database connection error!");

//     let (results_raw, _) = PostQuery::find_posts_in_page(&db, 0, 42)
//         .await
//         .unwrap();

//     println!("{results_raw:?}")
// }