diff --git a/.travis.yml b/.travis.yml
index ad8a21b..99a18e5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -13,4 +13,4 @@ before_install:
before_script:
- echo -e "Host $REMOTE_ADDRESS\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config
after_success:
-- bash deploy.sh
+- bash deploy.sh
\ No newline at end of file
diff --git a/services/Browser.js b/Browser/index.js
similarity index 70%
rename from services/Browser.js
rename to Browser/index.js
index da64082..7ea5569 100644
--- a/services/Browser.js
+++ b/Browser/index.js
@@ -20,10 +20,11 @@ class Browser {
}
/**
- * Create new disabled asset page.
+ * Create new optimized asset page.
*/
async newOptimizedPage() {
const page = await this.browser.newPage()
+ page.setDefaultTimeout(60000)
await page.setRequestInterception(true)
page.on('request', (req) => {
@@ -46,10 +47,31 @@ class Browser {
const { browserContextId } = await this.browser._connection.send('Target.createBrowserContext')
const page = await this.browser._createPageInContext(browserContextId)
page.browserContextId = browserContextId
+ page.setDefaultTimeout(60000)
+
+ page.on('request', (req) => {
+ if (req.resourceType() == 'stylesheet' || req.resourceType() == 'font' || req.resourceType() == 'image') {
+ req.abort()
+ } else {
+ req.continue()
+ }
+ })
return page
}
+ /**
+ * Get new tab page instance.
+ * @param page current page.
+ */
+ async getNewTabPage(page) {
+ const pageTarget = page.target()
+ const newTarget = await this.browser.waitForTarget(target => target.opener() === pageTarget)
+ const newPage = await newTarget.page()
+
+ return newPage
+ }
+
/**
* Close a page, use this function to close a page that has context.
*
@@ -68,26 +90,26 @@ class Browser {
* Wait for a selector and then return one element
* of that selector.
*
- * @param {Object} element Element handle
+ * @param {Object} page Browser page
* @param {String} selector Selector
*/
- async waitAndGetSelector(element, selector) {
- await element.waitForSelector(selector)
+ async $waitAndGet(page, selector) {
+ await page.waitForSelector(selector)
- return await element.$(selector)
+ return await page.$(selector)
}
/**
* Wait for a selector and then return all element
* of that selector.
*
- * @param {Object} element Element handle
+ * @param {Object} page Browser page
* @param {String} selector Selector
*/
- async waitAndGetSelectors(element, selector) {
- await element.waitForSelector(selector)
+ async $$waitAndGet(page, selector) {
+ await page.waitForSelector(selector)
- return await element.$$(selector)
+ return await page.$$(selector)
}
/**
diff --git a/LICENSE b/LICENSE
index e72bfdd..b3f1d8b 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,674 +1,21 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc.
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
-
- Copyright (C)
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see .
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- Copyright (C)
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-.
\ No newline at end of file
+MIT License
+
+Copyright (c) 2020 Gegeh Prast
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
index 3ed3be7..e74bbc0 100644
--- a/README.md
+++ b/README.md
@@ -6,326 +6,50 @@ Aplikasi untuk meng-crawl situs fastsub/fanshare Indonesia. Tujuan utamanya adal
Kunjungi https://shallty.moe/ untuk melihat satu contoh yang bisa dicapai menggunakan aplikasi ini.
-## Instalasi
-1. Instal [node.js](https://nodejs.org/en/).
-
-2. Download [rilisan terbaru Shallty](https://github.com/gegehprast/shallty/releases).
-
-3. Unzip dan masuk ke root direktori lalu jalankan `npm install`.
-
-4. Rename file `config.example.json` menjadi `config.json`.
-
-5. Sesuaikan `config.json`, misalnya ganti `app_env` menjadi `local` untuk bisa melihat prosesnya secara langsung.
-
-6. Jalankan perintah `node index.js` di terminal untuk memulai aplikasi. Kamu akan mendapatkan pesan server dan crawler ready jika tidak ada masalah.
-
-
-## Penggunaan
-### Dasar
-
-- API path: `localhost:8080/api` (default port)
-
-- Semua request melalui GET method
-
-- Semua parameter `url` harus di-encode terlebih dahulu
-
-### Endpoint
-
-**/moenime/animeList?show={type}**
-
-Keterangan: Mengambil daftar anime di halaman anime list (https://moenime.id/daftar-anime-baru/).
-
-Parameter:
-
-- `type` - tipe anime (movie, ongoing) (optional)
-
-Contoh: `/moenime/animeList?show=movie`
-
-
-
-**/moenime/episodes?link={url}**
-
-Keterangan: Mengambil daftar episode di halaman anime.
-
-Parameter:
-
-- `url` - url halaman anime, tanpa domain (Contoh: /boku-no-hero-academia-season-4-sub-indo/)
-
-Contoh: `/moenime/episodes?link=%2Fboku-no-hero-academia-season-4-sub-indo%2F`
-
-
-
-**/moenime/newReleases**
-
-Keterangan: Mengambil daftar rilisan terbaru.
-
-Contoh: `/moenime/newReleases`
-
-
-
-**/moenime/teknoku?link={url}**
-
-Keterangan: Mengambil tautan unduh asli dari teknoku.
-
-Parameter:
-
-- `url` - url shortlink teknoku (Contoh: https://teknoku.me/?id=eXRFbEsvdkFhNDQzeUZpV3B...)
-
-Contoh: `/moenime/teknoku?link=https%3A%2F%2Fteknoku.me%2F%3Fid%3DeXRFbEsvdkFhNDQzeUZpV3BjdTA4RmVDQ0JvbEZ6dVR0RzVWR0w5aUtYK20wVlFnVUltMklQbFVTdmE1MFYwSnptanROVVoraWhIMGd3b1o0bU5MeFE9PQ%3D%3D`
-
-
-
-**/kiryuu/mangaList**
-
-Keterangan: Mengambil daftar manga.
-
-Contoh: `/kiryuu/mangaList`
-
-
-
-**/kiryuu/mangaInfo?link={url}**
-
-Keterangan: Mengambil informasi manga.
-
-Parameter:
-
-- `url` - url halaman manga tanpa domain (Contoh: /manga/iron-ladies/)
-
-Contoh: `/kiryuu/mangaInfo?link=%2Fmanga%2Firon-ladies%2F`
-
-
-
-**/kiryuu/chapters?link={url}**
-
-Keterangan: Mengambil daftar chapter manga.
-
-Parameter:
-
-- `url` - url halaman manga tanpa domain (Contoh: /manga/iron-ladies/)
-
-Contoh: `/kiryuu/chapters?link=%2Fmanga%2Firon-ladies`
-
-
-
-**/kiryuu/images?link={url}**
-
-Keterangan: Mengambil daftar gambar dari satu chapter manga.
-
-Parameter:
-
-- `url` - url halaman chapter manga tanpa domain (Contoh: /iron-ladies-chapter-99-bahasa-indonesia/)
-
-Contoh: `/kiryuu/images?link=%2Firon-ladies-chapter-99-bahasa-indonesia%2F`
-
-
-
-**/kiryuu/newReleases**
-
-Keterangan: Mengambil daftar rilisan terbaru dari Kiryuu.
-
-Contoh: `/kiryuu/newReleases`
-
-
-
-**/samehadaku/anime?link={url}**
-
-Keterangan: Mengambil daftar episode untuk 1 halaman kategori anime.
+## Fitur
-Parameter:
+### Anime
-- `url` - url halaman kategori (Contoh: https://www.samehadaku.tv/anime/dragon-ball-heroes/)
+- Kusonime
-Contoh: `/samehadaku/anime?link=https%3A%2F%2Fwww.samehadaku.tv%2Fanime%2Fdragon-ball-heroes%2F`
-
-
-
-**/samehadaku/checkOnGoingPage**
+- Moenime
-Keterangan: Mengambil daftar rilisan terbaru.
+- Neonime
-Contoh: `/samehadaku/checkOnGoingPage`
-
-
-
-**/samehadaku/getDownloadLinks?link={url}**
+- Oploverz
-Keterangan: Mengambil daftar tautan unduh dari halaman episode.
+- Samehadaku
-Parameter:
+### Manga
-- `url` - url halaman episode (Contoh: https://www.samehadaku.tv/dragon-ball-heroes-episode-15/)
+- Kiryuu
-Contoh: `/samehadaku/getDownloadLinks?link=https%3A%2F%2Fwww.samehadaku.tv%2Fdragon-ball-heroes-episode-15%2F`
-
-
-
-**/samehadaku/tetew?link={url}**
+### Shortlink
-Keterangan: Mengambil tautan unduh asli dari shortlink tetew.com (sekarang anjay.info).
+- Ahexa
+- Anjay
+- Hexa
+- Hightech
+- etc, etc.
-Parameter:
-
-- `url` - url shortlink tetew / anjay (Contoh: https://anjay.info/?id=VWErNWlBZmpCUlMvT0pxVH...)
-
-Contoh: `/samehadaku/tetew?link=https%3A%2F%2Fanjay.info%2F%3Fid%3DVWErNWlBZmpCUlMvT0pxVHE3YS84c2Q0dExOcGF2M1lSam5GdEdDZnpmSnR4dmxrLzMrYXFNaGxadnZDTHBMag%3D%3D`
-
-
-
-**/samehadaku/njiir?link={url}**
-
-Keterangan: Lihat bagian `/samehadaku/tetew?link={url}` di atas.
-
-
-
-**/neonime/checkOnGoingPage**
-
-Keterangan: Mengambil daftar rilisan terbaru.
-
-Contoh: `/neonime/checkOnGoingPage`
-
-
-
-**/neonime/animeList**
-
-Keterangan: Mengambil daftar anime di halaman anime list (https://neonime.org/episode/).
-
-Contoh: `/neonime/animeList`
-
-
-
-**/neonime/tvShow?link={url}**
-
-Keterangan: Mengambil daftar episode dari halaman tv show.
-
-Parameter:
-
-- `url` - url halaman tv show (Contoh: https://neonime.org/tvshows/black-clover-subtitle-indonesia/)
-
-Contoh: `/neonime/tvShow?link=https%3A%2F%2Fneonime.org%2Ftvshows%2Fblack-clover-subtitle-indonesia%2F`
-
-
-
-**/neonime/getEpisodes?link={url}**
-
-Keterangan: Mengambil daftar tautan unduh dari halaman episode tv show.
-
-Parameter:
-
-- `url` - url halaman episode tv show (Contoh: https://neonime.org/episode/black-clover-1x107/)
-
-Contoh: `/neonime/getEpisodes?link=https%3A%2F%2Fneonime.org%2Fepisode%2Fblack-clover-1x107%2F`
-
-
-
-**/neonime/getBatchEpisodes?link={url}**
-
-Keterangan: Mengambil daftar tautan unduh dari halaman episode batch.
-
-Parameter:
-
-- `url` - url shortlink hightech (Contoh: https://neonime.org/batch/chihayafuru-season-2-bd-batch-subtitle-indonesia/)
-
-Contoh: `/neonime/getBatchEpisodes?link=https%3A%2F%2Fneonime.org%2Fbatch%2Fchihayafuru-season-2-bd-batch-subtitle-indonesia%2F`
-
-
-
-**/neonime/hightech?link={url}**
-
-Keterangan: Mengambil tautan unduh asli dari hightech (sekarang xmaster.xyz).
-
-Parameter:
-
-- `url` - url shortlink hightech (Contoh: https://xmaster.xyz/?sitex=aHR0cHM6Ly93d3c3OS56aXBwe...)
-
-Contoh: `/neonime/hightech?link=https%3A%2F%2Fxmaster.xyz%2F%3Fsitex%3DaHR0cHM6Ly93d3c3OS56aXBweXNoYXJlLmNvbS92LzFkNDZ3eWk3L2ZpbGUuaHRtbA%3D%3D`
-
-
-
-**/oploverz/checkOnGoingPage**
-
-Keterangan: Mengambil daftar rilisan terbaru.
-
-Contoh: `/oploverz/checkOnGoingPage`
-
-
-
-**/oploverz/series?link={url}**
-
-Keterangan: Mengambil daftar episode dari halaman series.
-
-Parameter:
-
-- `url` - url series (Contoh: https://www.oploverz.in/series/diamond-no-ace-s3/)
-
-Contoh: `/oploverz/series?link=https%3A%2F%2Fwww.oploverz.in%2Fseries%2Fdiamond-no-ace-s3%2F`
-
-
-
-**/oploverz/getDownloadLinks?link={url}**
-
-Keterangan: Mengambil daftar tautan unduh dari halaman episode.
-
-Parameter:
-
-- `url` - url series (Contoh: https://www.oploverz.in/diamond-no-ace-s3-31-subtitle-indonesia/)
-
-Contoh: `/oploverz/getDownloadLinks?link=https%3A%2F%2Fwww.oploverz.in%2Fdiamond-no-ace-s3-31-subtitle-indonesia%2F`
-
-
-
-**/oploverz/hexa?link={url}**
-
-Keterangan: Mengambil tautan unduh asli dari hightech (sekarang xmaster.xyz).
-
-Parameter:
-
-- `url` - url series (Contoh: https://www.oploverz.in/diamond-no-ace-s3-31-subtitle-indonesia/)
-
-Contoh: `/oploverz/getDownloadLinks?link=https%3A%2F%2Fwww.oploverz.in%2Fdiamond-no-ace-s3-31-subtitle-indonesia%2F`
-
-
-
-**/kusonime/animeList**
-
-Keterangan: Mengambil daftar anime dari halaman anime list.
-
-Contoh: `/kusonime/animeList`
-
-
-
-**/kusonime/homePage?page={page}**
-
-Keterangan: Mengambil daftar rilisan dari halaman home.
-
-Parameter:
-
-- `page` - nomor halaman (optional)
+## Instalasi
-Contoh: `/kusonime/homePage?page=20`
-
-
-
-**/kusonime/getDownloadLinks?link={url}**
+1. Instal [node.js](https://nodejs.org/en/).
-Keterangan: Mengambil daftar tautan unduh dari halaman rilisan.
+2. Download [rilisan terbaru Shallty](https://github.com/gegehprast/shallty/releases) atau clone repositori ini.
-Parameter:
+3. Unzip dan masuk ke root direktori lalu jalankan `npm install`.
-- `url` - url rilisan (Contoh: https://kusonime.com/sora-no-method-ova-batch-subtitle-indonesia/)
+4. Ganti nama file `config.example.json` menjadi `config.json`.
-Contoh: `/kusonime/getDownloadLinks?link=https%3A%2F%2Fkusonime.com%2Fsora-no-method-ova-batch-subtitle-indonesia%2F`
-
-
-
-**/kusonime/semrawut?link={url}**
+5. Sesuaikan `config.json`, misalnya ganti `app_env` menjadi `local` untuk bisa melihat prosesnya secara langsung.
-Keterangan: Mengambil tautan unduh asli dari shortlink semrawut/semawur/kepoow/sukakesehattan/jelajahinternet.
+6. Jalankan perintah `node index.js` di terminal untuk memulai aplikasi. Kamu akan mendapatkan pesan server dan crawler ready jika tidak ada masalah.
-Parameter:
-- `url` - url shortlink (Contoh: https://kepoow.me/?r=aHR0cHM6Ly9kcml2ZS5nb29nbGUuY29tL2ZpbGUvZC8xQjNlY2h4dEYwMFNUbVRRWklEcW8xUVJ3a1RHTmFSaXkvdmlldw==)
+## Dokumentasi
-Contoh: `/kusonime/semrawut?link=https%3A%2F%2Fkepoow.me%2F%3Fr%3DaHR0cHM6Ly9kcml2ZS5nb29nbGUuY29tL2ZpbGUvZC8xQjNlY2h4dEYwMFNUbVRRWklEcW8xUVJ3a1RHTmFSaXkvdmlldw%3D%3D`
+https://documenter.getpostman.com/view/3762767/SWLh5STA?version=latest
## Bantuan, Lapor Bug, dan Kritik dan Saran
diff --git a/config.example.json b/config.example.json
index be36864..9783080 100644
--- a/config.example.json
+++ b/config.example.json
@@ -1,8 +1,9 @@
{
"app_env": "production",
"app_port": 8080,
+ "meownime_url": "https://meownime.com",
"samehadaku_url": "https://www.samehadaku.tv",
- "neonime_url": "https://neonime.net",
+ "neonime_url": "https://neonime.org",
"samehadaku_magBoxContainer": 1,
"oploverz_url": "https://www.oploverz.in",
"kusonime_url": "https://kusonime.com",
diff --git a/controllers/KiryuuController.js b/controllers/KiryuuController.js
index 483be74..2bd21e2 100644
--- a/controllers/KiryuuController.js
+++ b/controllers/KiryuuController.js
@@ -1,9 +1,8 @@
-const Browser = require('../services/Browser')
-const Kiryuu = new (require('../services/Kiryuu'))(Browser)
+const Kiryuu = require('../fansubs/Kiryuu')
class KiryuuController {
async mangaList(req, res) {
- const manga = await Kiryuu.getMangaList()
+ const manga = await Kiryuu.mangaList()
if (manga.error) {
res.status(500).json({
status: 500,
@@ -19,7 +18,7 @@ class KiryuuController {
}
async mangaInfo(req, res) {
- const result = await Kiryuu.getMangaInfo(req.query.link)
+ const result = await Kiryuu.mangaInfo(req.query.link)
if (result.error) {
res.status(500).json({
status: 500,
@@ -35,7 +34,7 @@ class KiryuuController {
}
async chapters(req, res) {
- const chapters = await Kiryuu.getChapters(req.query.link)
+ const chapters = await Kiryuu.chapters(req.query.link)
if (chapters.error) {
res.status(500).json({
status: 500,
@@ -51,7 +50,7 @@ class KiryuuController {
}
async images(req, res) {
- const images = await Kiryuu.getImages(req.query.link)
+ const images = await Kiryuu.images(req.query.link)
if (images.error) {
res.status(500).json({
status: 500,
@@ -67,7 +66,7 @@ class KiryuuController {
}
async newReleases(req, res) {
- const releases = await Kiryuu.getNewReleases(req.query.link)
+ const releases = await Kiryuu.newReleases(req.query.link)
if (releases.error) {
res.status(500).json({
status: 500,
diff --git a/controllers/KusonimeController.js b/controllers/KusonimeController.js
index 2bf7a09..c62d2d0 100644
--- a/controllers/KusonimeController.js
+++ b/controllers/KusonimeController.js
@@ -1,9 +1,8 @@
-const Browser = require('../services/Browser')
-const Kusonime = require('../services/Kusonime')
+const Kusonime = require('../fansubs/Kusonime')
class KusonimeController {
async animeList(req, res) {
- const anime = await new Kusonime(Browser).animeList()
+ const anime = await Kusonime.animeList()
if (anime.error) {
res.status(500).json({
status: 500,
@@ -18,8 +17,8 @@ class KusonimeController {
}
}
- async homePage(req, res) {
- const posts = await new Kusonime(Browser).homePage(req.query.page)
+ async newReleases(req, res) {
+ const posts = await Kusonime.newReleases(req.query.page)
if (posts.error) {
res.status(500).json({
status: 500,
@@ -34,8 +33,8 @@ class KusonimeController {
}
}
- async getDownloadLinks(req, res) {
- const data = await new Kusonime(Browser).getDownloadLinks(req.query.link)
+ async links(req, res) {
+ const data = await Kusonime.links(req.query.link)
if (data.error) {
res.status(500).json({
status: 500,
@@ -49,22 +48,6 @@ class KusonimeController {
})
}
}
-
- async semrawut(req, res) {
- const semrawut = await new Kusonime(Browser).semrawut(req.query.link)
- if (semrawut.error) {
- res.status(500).json({
- status: 500,
- message: semrawut.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: semrawut
- })
- }
- }
}
module.exports = new KusonimeController
\ No newline at end of file
diff --git a/controllers/MeownimeController.js b/controllers/MeownimeController.js
deleted file mode 100644
index 9071935..0000000
--- a/controllers/MeownimeController.js
+++ /dev/null
@@ -1,120 +0,0 @@
-const Browser = require('../services/Browser')
-const Meownime = new(require('../services/Meownime'))(Browser)
-
-class MeownimeController {
- async anime(req, res) {
- const episodes = await Meownime.getEpisodes(req.query.link)
- if (episodes.error) {
- res.status(500).json({
- status: 500,
- message: episodes.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: episodes
- })
- }
- }
-
- async movie(req, res) {
- const episodes = await Meownime.getMovieEpisodes(req.query.link)
- if (episodes.error) {
- res.status(500).json({
- status: 500,
- message: episodes.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: episodes
- })
- }
- }
-
- async davinsurance(req, res) {
- const link = await Meownime.davinsurance(req.query.link)
- if (link.error) {
- res.status(500).json({
- status: 500,
- message: link.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: link
- })
- }
- }
-
- async meowbox(req, res) {
- const link = await Meownime.meowbox(req.query.link)
- if (link.error) {
- res.status(500).json({
- status: 500,
- message: link.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: link
- })
- }
- }
-
- async meowdrive(req, res) {
- const link = await Meownime.meowdrive(req.query.link)
- if (link.error) {
- res.status(500).json({
- status: 500,
- message: link.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: link
- })
- }
- }
-
- async checkOnGoingPage(req, res) {
- const anime = await Meownime.checkOnGoingPage()
- if (anime.error) {
- res.status(500).json({
- status: 500,
- message: anime.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: anime
- })
- }
- }
-
- async onGoingAnime(req, res) {
- const anime = await Meownime.onGoingAnime(req.query.link)
- if (anime.error) {
- res.status(500).json({
- status: 500,
- message: anime.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: anime
- })
- }
- }
-}
-
-
-
-module.exports = new MeownimeController
\ No newline at end of file
diff --git a/controllers/MoenimeController.js b/controllers/MoenimeController.js
index 84f49db..8784919 100644
--- a/controllers/MoenimeController.js
+++ b/controllers/MoenimeController.js
@@ -1,5 +1,4 @@
-const Browser = require('../services/Browser')
-const Moenime = new (require('../services/Moenime'))(Browser)
+const Moenime = require('../fansubs/Moenime')
class MoenimeController {
async animeList(req, res) {
@@ -19,7 +18,7 @@ class MoenimeController {
}
}
- async episodes(req, res) {
+ async links(req, res) {
const episodes = await Moenime.episodes(req.query.link)
if (episodes.error) {
res.status(500).json({
@@ -50,22 +49,6 @@ class MoenimeController {
})
}
}
-
- async teknoku(req, res) {
- const url = await Moenime.teknoku(req.query.link)
- if (url.error) {
- res.status(500).json({
- status: 500,
- message: url.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: url
- })
- }
- }
}
module.exports = new MoenimeController
\ No newline at end of file
diff --git a/controllers/NeonimeController.js b/controllers/NeonimeController.js
index d0af919..ccf0691 100644
--- a/controllers/NeonimeController.js
+++ b/controllers/NeonimeController.js
@@ -1,9 +1,8 @@
-const Browser = require('../services/Browser')
-const Neonime = new (require('../services/Neonime'))(Browser)
+const Neonime = require('../fansubs/Neonime')
class NeonimeController {
- async checkOnGoingPage(req, res) {
- const anime = await Neonime.checkOnGoingPage()
+ async newReleases(req, res) {
+ const anime = await Neonime.newReleases()
if (anime.error) {
res.status(500).json({
status: 500,
@@ -34,8 +33,8 @@ class NeonimeController {
}
}
- async tvShow(req, res) {
- const episodes = await Neonime.tvShow(req.query.link)
+ async episodes(req, res) {
+ const episodes = await Neonime.episodes(req.query.link)
if (episodes.error) {
res.status(500).json({
status: 500,
@@ -50,8 +49,8 @@ class NeonimeController {
}
}
- async getBatchEpisodes(req, res) {
- const episodes = await Neonime.getBatchEpisodes(req.query.link)
+ async links(req, res) {
+ const episodes = await Neonime.links(req.query.link)
if (episodes.error) {
res.status(500).json({
status: 500,
@@ -65,38 +64,6 @@ class NeonimeController {
})
}
}
-
- async getEpisodes(req, res) {
- const episodes = await Neonime.getEpisodes(req.query.link)
- if (episodes.error) {
- res.status(500).json({
- status: 500,
- message: episodes.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: episodes
- })
- }
- }
-
- async hightech(req, res) {
- const url = await Neonime.hightech(req.query.link)
- if (url.error) {
- res.status(500).json({
- status: 500,
- message: url.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: url
- })
- }
- }
}
module.exports = new NeonimeController
\ No newline at end of file
diff --git a/controllers/OploverzController.js b/controllers/OploverzController.js
index 30eddc0..44701f4 100644
--- a/controllers/OploverzController.js
+++ b/controllers/OploverzController.js
@@ -1,9 +1,8 @@
-const Browser = require('../services/Browser')
-const Oploverz = new (require('../services/Oploverz'))(Browser)
+const Oploverz = require('../fansubs/Oploverz')
class OploverzController {
- async checkOnGoingPage(req, res) {
- const anime = await Oploverz.checkOnGoingPage()
+ async animeList(req, res) {
+ const anime = await Oploverz.animeList()
if (anime.error) {
res.status(500).json({
status: 500,
@@ -18,50 +17,50 @@ class OploverzController {
}
}
- async series(req, res) {
- const episodes = await Oploverz.series(req.query.link)
- if (episodes.error) {
+ async newReleases(req, res) {
+ const anime = await Oploverz.newReleases()
+ if (anime.error) {
res.status(500).json({
status: 500,
- message: episodes.message
+ message: anime.message
})
} else {
res.json({
status: 200,
message: 'Success',
- data: episodes
+ data: anime
})
}
}
- async getDownloadLinks(req, res) {
- const links = await Oploverz.getDownloadLinks(req.query.link)
- if (links.error) {
+ async episodes(req, res) {
+ const episodes = await Oploverz.episodes(req.query.link)
+ if (episodes.error) {
res.status(500).json({
status: 500,
- message: links.message
+ message: episodes.message
})
} else {
res.json({
status: 200,
message: 'Success',
- data: links
+ data: episodes
})
}
}
- async hexa(req, res) {
- const hexa = await Oploverz.hexa(req.query.link)
- if (hexa.error) {
+ async links(req, res) {
+ const links = await Oploverz.links(req.query.link)
+ if (links.error) {
res.status(500).json({
status: 500,
- message: hexa.message
+ message: links.message
})
} else {
res.json({
status: 200,
message: 'Success',
- data: hexa
+ data: links
})
}
}
diff --git a/controllers/SamehadakuController.js b/controllers/SamehadakuController.js
index 557b67a..b1f9882 100644
--- a/controllers/SamehadakuController.js
+++ b/controllers/SamehadakuController.js
@@ -1,11 +1,25 @@
-const Browser = require('../services/Browser')
-const Samehadaku = new (require('../services/Samehadaku'))(Browser)
-const SamehadakuEas = new (require('../services/SamehadakuEas'))(Browser)
+const Samehadaku = require('../fansubs/Samehadaku')
const Util = require('../utils/utils')
class SamehadakuController {
- async anime(req, res) {
- const episodes = await SamehadakuEas.getEpisodes(req.query.link)
+ async animeList(req, res) {
+ const anime = await Samehadaku.animeList()
+ if (anime.error) {
+ res.status(500).json({
+ status: 500,
+ message: anime.message
+ })
+ } else {
+ res.json({
+ status: 200,
+ message: 'Success',
+ data: anime
+ })
+ }
+ }
+
+ async episodes(req, res) {
+ const episodes = await Samehadaku.episodes(req.query.link)
if (episodes.error) {
res.status(500).json({
status: 500,
@@ -20,15 +34,15 @@ class SamehadakuController {
}
}
- async checkOnGoingPage(req, res) {
- const anime = await SamehadakuEas.checkOnGoingPage()
- const animeArr = [], checkOnGoingPageArr = []
+ async newReleases(req, res) {
+ const anime = await Samehadaku.newReleases()
+ const animeArr = [], checkNewReleases = []
for (let i = 2; i < 8; i++) {
- checkOnGoingPageArr.push(SamehadakuEas.checkOnGoingPage(i))
+ checkNewReleases.push(Samehadaku.newReleases(i))
}
- await Promise.all(checkOnGoingPageArr)
+ await Promise.all(checkNewReleases)
.then(values => {
for (let i = 2; i < 8; i++) {
animeArr[i] = values[i - 2].error ? [] : values[i - 2]
@@ -54,8 +68,8 @@ class SamehadakuController {
})
}
- async getDownloadLinks(req, res) {
- const links = await Samehadaku.getDownloadLinks(req.query.link)
+ async links(req, res) {
+ const links = await Samehadaku.links(req.query.link)
if (links.error) {
res.status(500).json({
status: 500,
@@ -69,38 +83,6 @@ class SamehadakuController {
})
}
}
-
- async tetew(req, res) {
- const tetew = await Samehadaku.anjay(req.query.link)
- if (tetew.error) {
- res.status(500).json({
- status: 500,
- message: tetew.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: tetew
- })
- }
- }
-
- async njiir(req, res) {
- const njiir = await Samehadaku.njiir(req.query.link)
- if (njiir.error) {
- res.status(500).json({
- status: 500,
- message: njiir.message
- })
- } else {
- res.json({
- status: 200,
- message: 'Success',
- data: njiir
- })
- }
- }
}
module.exports = new SamehadakuController
\ No newline at end of file
diff --git a/controllers/ShortlinkController.js b/controllers/ShortlinkController.js
new file mode 100644
index 0000000..f73e00d
--- /dev/null
+++ b/controllers/ShortlinkController.js
@@ -0,0 +1,21 @@
+const Shortlink = require('../shortlinks')
+
+class ShortlinkController {
+ async index(req, res) {
+ const data = await Shortlink.parse(req.query.link)
+ if (data.error) {
+ res.status(500).json({
+ status: 500,
+ message: data.message
+ })
+ } else {
+ res.json({
+ status: 200,
+ message: 'Success',
+ data: data
+ })
+ }
+ }
+}
+
+module.exports = new ShortlinkController
\ No newline at end of file
diff --git a/deploy.sh b/deploy.sh
index 14d3940..5b6ed31 100644
--- a/deploy.sh
+++ b/deploy.sh
@@ -1,13 +1,13 @@
#!/bin/bash
set -x
-if [ $TRAVIS_BRANCH == 'master' ] ; then
+if [ $TRAVIS_BRANCH == 'deploy' ] ; then
git init
git remote add deploy "ssh://deploy@$REMOTE_ADDRESS:/home/deploy/shallty/.git"
git config user.name "Travis CI"
git config user.email "travisCI@gmail.com"
git add .
git commit -m "Deploy"
- git push deploy master --force
+ git push deploy deploy --force
else
- echo "Not deploying, since this branch isn't master."
+ echo "Not deploying, since this branch isn't deploy."
fi
\ No newline at end of file
diff --git a/fansubs/Kusonime.js b/fansubs/Kusonime.js
new file mode 100644
index 0000000..8e0b191
--- /dev/null
+++ b/fansubs/Kusonime.js
@@ -0,0 +1,181 @@
+const Browser = require('../Browser')
+const Util = require('../utils/utils')
+const Handler = require('../exceptions/Handler')
+const { kusonime_url } = require('../config.json')
+
+class Kusonime {
+ /**
+ * Parse and get anime list. Currently supports only up to page 2.
+ */
+ async animeList() {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ let animeList = []
+ for (let i = 1; i < 3; i++) {
+ await page.goto(`${kusonime_url}/anime-list-batch/${i > 1 ? `page/${i}/`: ''}`)
+ const anchors = await Browser.$$waitAndGet(page, 'a.kmz')
+ await Util.asyncForEach(anchors, async (anchor) => {
+ const title = await Browser.getPlainProperty(anchor, 'innerText')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+ const link = rawLink.replace(kusonime_url, '')
+
+ animeList.push({
+ title: title,
+ link: link,
+ raw_link: rawLink
+ })
+ })
+ }
+
+ await page.close()
+
+ return animeList
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse home page and get post list.
+ *
+ * @param {Number} homePage Home page.
+ */
+ async newReleases(homePage = 1) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ const posts = []
+ await page.goto(`${kusonime_url}/page/${homePage}`)
+ const kovers = await Browser.$$waitAndGet(page, 'div.venz > ul > div.kover')
+ await Util.asyncForEach(kovers, async (kover) => {
+ const anchor = await kover.$('.episodeye > a')
+ const title = await Browser.getPlainProperty(anchor, 'innerText')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+ const link = rawLink.replace(kusonime_url, '')
+
+ posts.push({
+ title: title,
+ link: link,
+ raw_link: rawLink
+ })
+ })
+
+ await page.close()
+
+ return posts
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse download links from episode page of a title.
+ *
+ * @param {String} link Episode page url.
+ */
+ async links(link) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(kusonime_url + link, {
+ timeout: 300000
+ })
+
+ const dlbod = await Browser.$waitAndGet(page, 'div.dlbod')
+ const smokeddls = await dlbod.$$('div.smokeddl')
+ const downloadLinks = smokeddls.length > 0 ? await this.parseSmokeddl(smokeddls) : await this.parseZeroSmodeddl(dlbod)
+
+ await page.close()
+
+ return downloadLinks
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse download links from the smokeddl containers of an episode page.
+ *
+ * @param smokeddls Array of smokeddl div ElementHandles.
+ */
+ async parseSmokeddl(smokeddls) {
+ let downloadLinks = []
+ await Util.asyncForEach(smokeddls, async (smokeddl) => {
+ let smokettl = await smokeddl.$('div.smokettl')
+ if (typeof smokettl == 'undefined' || !smokettl) {
+ smokettl = await smokeddl.$('.smokeurl:nth-child(1)')
+ }
+ const episodeTitle = await Browser.getPlainProperty(smokettl, 'innerText')
+ const smokeurls = await smokeddl.$$('div.smokeurl')
+ const newDownloadLinks = await this.parseSmokeurl(smokeurls, episodeTitle)
+ downloadLinks = downloadLinks.concat(newDownloadLinks)
+ })
+
+ return downloadLinks
+ }
+
+ /**
+ * Parse download links from episode page of a title that does not have smokeddl div.
+ *
+ * @param dlbod The dlbod container ElementHandle.
+ */
+ async parseZeroSmodeddl(dlbod) {
+ let smokettl = await dlbod.$('div.smokettl')
+ if (typeof smokettl == 'undefined' || !smokettl) {
+ smokettl = await dlbod.$('.smokeurl:nth-child(1)')
+ }
+ const episodeTitle = await Browser.getPlainProperty(smokettl, 'innerText')
+ const smokeurls = await dlbod.$$('div.smokeurl')
+ const downloadLinks = await this.parseSmokeurl(smokeurls, episodeTitle)
+
+ return downloadLinks
+ }
+
+ /**
+ * Parse download links from smokeurls div.
+ *
+ * @param smokeurls Array of smokeurl div ElementHandles.
+ * @param {String} episodeTitle Episode title.
+ */
+ async parseSmokeurl(smokeurls, episodeTitle) {
+ const downloadLinks = []
+ const episodeMatches = episodeTitle.match(/([\d-]+)/g)
+ const numeral = episodeMatches[0].length == 1 ? '0' + episodeMatches[0] : episodeMatches[0]
+
+ await Util.asyncForEach(smokeurls, async (smokeurl) => {
+ const anchors = await smokeurl.$$('a')
+ const strong = await smokeurl.$('strong')
+ if (typeof strong == 'undefined' || !strong) {
+ return false
+ }
+
+ const quality = await Browser.getPlainProperty(strong, 'innerText')
+
+ await Util.asyncForEach(anchors, async (anchor) => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ const episode = {
+ 'episode': numeral,
+ 'quality': quality,
+ 'host': host,
+ 'link': link
+ }
+
+ downloadLinks.push(episode)
+ })
+ })
+
+ return downloadLinks
+ }
+}
+
+module.exports = new Kusonime
\ No newline at end of file
diff --git a/services/Moenime.js b/fansubs/Moenime.js
similarity index 73%
rename from services/Moenime.js
rename to fansubs/Moenime.js
index ebcbce4..6114f92 100644
--- a/services/Moenime.js
+++ b/fansubs/Moenime.js
@@ -1,38 +1,36 @@
+const Browser = require('../Browser')
const Util = require('../utils/utils')
const Handler = require('../exceptions/Handler')
const { moenime_url } = require('../config.json')
class Moenime {
- constructor(browser) {
- this.browser = browser
- }
-
/**
* Get anime list from anime list page.
*
* @param {String} show Show type, could be: movie, ongoing or, all.
*/
async animeList(show = 'all') {
- const page = await this.browser.newOptimizedPage()
+ const page = await Browser.newOptimizedPage()
try {
- await page.goto(moenime_url + '/daftar-anime-baru/', {
- timeout: 60000
- })
+ const anime = []
+ await page.goto(moenime_url + '/daftar-anime-baru/')
- await page.waitForSelector('div.tab-content')
- const animeList = await page.$$eval(`div.tab-content #${show} a.nyaalist`, nodes => nodes.map(x => {
- const title = x.innerText
- const link = x.href
+ const nyaalist = await Browser.$$waitAndGet(page, `div.tab-content #${show} a.nyaalist`)
+ await Util.asyncForEach(nyaalist, async (anchor) => {
+ const title = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
- return {
- link: link,
- title: title
- }
- }))
+ anime.push({
+ title: title,
+ link: link.replace(moenime_url, ''),
+ raw_link: link
+ })
+ })
+
await page.close()
- return animeList
+ return anime
} catch (error) {
await page.close()
@@ -41,140 +39,19 @@ class Moenime {
}
/**
- * Parse files of an episode.
- *
- * @param {ElementHandle} page ElementHandle.
- * @param {ElementHandle} table ElementHandle.
- * @param {ElementHandle} tRow ElementHandle.
- */
- async parseOngoingEpisodeFiles(page, table, tRow) {
- const files = []
- let episode = await table.$eval('center', node => node.innerText)
- const matches = episode.match(/Episode ([0-9])+/g)
- if (!matches)
- return {}
-
- const alpha = matches[0].replace(/\s|-/g, '_').toLowerCase()
- const qualityHandle = await page.evaluateHandle(tRow => tRow.previousElementSibling, tRow)
- const quality = await this.browser.getPlainProperty(qualityHandle, 'innerText')
-
- const anchors = await tRow.$$('a')
- await Util.asyncForEach(anchors, async anchor => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- files.push({
- quality: quality,
- host: host,
- link: link
- })
- })
-
- return {
- alpha: alpha,
- files: files
- }
- }
-
- /**
- * Parse files of a completed episode.
- *
- * @param {String} quality Episode quality.
- * @param {ElementHandle} episodeDiv ElementHandle.
- * @param {ElementHandle} dlLinkRow ElementHandle.
- */
- async parseCompletedEpisodeFiles(quality, episodeDiv, dlLinkRow) {
- const files = []
- const episode = await episodeDiv.$eval('td', node => node.innerText)
-
- const episodeSplit = episode.split(' — ')
- const alpha = episodeSplit[0].replace(/\s|-/g, '_').toLowerCase()
- const size = episodeSplit[1]
-
- const dlLinkAnchors = await dlLinkRow.$$('a')
- await Util.asyncForEach(dlLinkAnchors, async (anchor) => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- files.push({
- quality: `${quality.split(' — ')[1]} - ${size}`,
- host: host,
- link: link
- })
- })
-
- return {
- alpha: alpha,
- files: files
- }
- }
-
- /**
- * Parse files of a batch episode.
- *
- * @param {ElementHandle} episodeDiv ElementHandle.
- */
- async parseBatchEpisodeFiles(episodeDiv) {
- const files = []
- const quality = await episodeDiv.$eval('tr:not([bgcolor="#eee"]', node => node.innerText)
- const dlLinkAnchors = await episodeDiv.$$('tr[bgcolor="#eee"] a')
-
- await Util.asyncForEach(dlLinkAnchors, async (anchor) => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- files.push({
- quality: quality.replace(' | ', ' - '),
- host: host,
- link: link
- })
- })
-
- return files
- }
-
- /**
- * Parse files of a completed episode.
- *
- * @param {String} quality Episode quality.
- * @param {ElementHandle} episodeDiv ElementHandle.
- * @param {ElementHandle} dlLinkRow ElementHandle.
- */
- async parseMovieEpisodeFiles(qualityTrow, filesTRow) {
- const files = []
- const quality = await this.browser.getPlainProperty(qualityTrow, 'innerText')
-
- const anchors = await filesTRow.$$('a')
- await Util.asyncForEach(anchors, async anchor => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- files.push({
- quality: quality,
- host: host,
- link: link
- })
- })
-
- return files
- }
-
- /**
- * Get episodes from ongoing anime page.
+ * Get episodes with links from ongoing anime page.
*
* @param {String} link Anime page url.
*/
async episodes(link) {
- const page = await this.browser.newOptimizedPage()
+ const page = await Browser.newOptimizedPage()
try {
let episodes = {}
link = decodeURIComponent(link)
- await page.goto(moenime_url + link, {
- timeout: 60000
- })
+ await page.goto(moenime_url + link)
- const tRowsHandle = await this.browser.waitAndGetSelectors(page, 'tr[bgcolor="#eee"]')
+ const tRowsHandle = await Browser.$$waitAndGet(page, 'tr[bgcolor="#eee"]')
await Util.asyncForEach(tRowsHandle, async tRowHandle => {
// search for previous sibling table element
let tableHandle = await page.evaluateHandle(tRow => {
@@ -220,7 +97,7 @@ class Moenime {
try {
const episodes = {}
- const moeDlLinks = await this.browser.waitAndGetSelectors(page, 'div.moe-dl-link')
+ const moeDlLinks = await Browser.$$waitAndGet(page, 'div.moe-dl-link')
await Util.asyncForEach(moeDlLinks, async (moeDlLink) => {
const quality = await moeDlLink.$eval('div.tombol', nodes => nodes.innerText)
if (!quality.toLowerCase().includes('batch')) {
@@ -257,7 +134,7 @@ class Moenime {
try {
const episodes = {}
- const tRowsHandle = await this.browser.waitAndGetSelectors(page, 'tr[bgcolor="#eee"]')
+ const tRowsHandle = await Browser.$$waitAndGet(page, 'tr[bgcolor="#eee"]')
await Util.asyncForEach(tRowsHandle, async tRowHandle => {
// search for previous sibling tr element
let trQualityhandle = await page.evaluateHandle(tRow => {
@@ -285,13 +162,11 @@ class Moenime {
*
*/
async newReleases() {
- const page = await this.browser.newOptimizedPage()
+ const page = await Browser.newOptimizedPage()
try {
const anime = []
- await page.goto(moenime_url + '/tag/ongoing/', {
- timeout: 60000
- })
+ await page.goto(moenime_url + '/tag/ongoing/')
await page.waitForSelector('article')
const articles = await page.$$('article')
@@ -304,7 +179,8 @@ class Moenime {
anime.push({
episode: episode.split(' ')[1],
title: info.title,
- link: info.link.replace(moenime_url, '').replace(/\/+$/, '')
+ link: info.link.replace(moenime_url, '').replace(/\/+$/, ''),
+ raw_link: info.link
})
})
@@ -318,40 +194,124 @@ class Moenime {
}
}
- async teknoku(link) {
- const page = await this.browser.newOptimizedPage()
+ /**
+ * Parse files of an ongoing episode.
+ *
+ * @param {Page} page Puppeteer page instance.
+ * @param {ElementHandle} table Episode table ElementHandle.
+ * @param {ElementHandle} tRow Table row ElementHandle.
+ */
+ async parseOngoingEpisodeFiles(page, table, tRow) {
+ const files = []
+ let episode = await table.$eval('center', node => node.innerText)
+ const matches = episode.match(/Episode ([0-9])+/g)
+ if (!matches)
+ return {}
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 60000
+ const alpha = matches[0].replace(/\s|-/g, '_').toLowerCase()
+ const qualityHandle = await page.evaluateHandle(tRow => tRow.previousElementSibling, tRow)
+ const quality = await Browser.getPlainProperty(qualityHandle, 'innerText')
+
+ const anchors = await tRow.$$('a')
+ await Util.asyncForEach(anchors, async anchor => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ files.push({
+ quality: quality,
+ host: host,
+ link: link
})
+ })
- await Promise.all([
- page.waitForNavigation({
- timeout: 60000,
- waitUntil: 'domcontentloaded'
- }),
- page.$eval('#srl > form', form => form.submit()),
- ])
+ return {
+ alpha: alpha,
+ files: files
+ }
+ }
- const fullContent = await page.content()
- await page.close()
+ /**
+ * Parse files of a completed episode.
+ *
+ * @param {String} quality Episode quality.
+ * @param {ElementHandle} episodeDiv ElementHandle.
+ * @param {ElementHandle} dlLinkRow ElementHandle.
+ */
+ async parseCompletedEpisodeFiles(quality, episodeDiv, dlLinkRow) {
+ const files = []
+ const episode = await episodeDiv.$eval('td', node => node.innerText)
- // eslint-disable-next-line quotes
- let splitted = fullContent.split("function changeLink(){var a='")
- splitted = splitted[1].split(';window.open')
- splitted = splitted[0].replace(/(['"])+/g, '')
+ const episodeSplit = episode.split(' — ')
+ const alpha = episodeSplit[0].replace(/\s|-/g, '_').toLowerCase()
+ const size = episodeSplit[1]
+
+ const dlLinkAnchors = await dlLinkRow.$$('a')
+ await Util.asyncForEach(dlLinkAnchors, async (anchor) => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
- return {
- url: splitted
- }
- } catch (error) {
- await page.close()
+ files.push({
+ quality: `${quality.split(' — ')[1]} - ${size}`,
+ host: host,
+ link: link
+ })
+ })
- return Handler.error(error)
+ return {
+ alpha: alpha,
+ files: files
}
}
+
+ /**
+ * Parse files of a batch episode.
+ *
+ * @param {ElementHandle} episodeDiv ElementHandle.
+ */
+ async parseBatchEpisodeFiles(episodeDiv) {
+ const files = []
+ const quality = await episodeDiv.$eval('tr:not([bgcolor="#eee"]', node => node.innerText)
+ const dlLinkAnchors = await episodeDiv.$$('tr[bgcolor="#eee"] a')
+
+ await Util.asyncForEach(dlLinkAnchors, async (anchor) => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ files.push({
+ quality: quality.replace(' | ', ' - '),
+ host: host,
+ link: link
+ })
+ })
+
+ return files
+ }
+
+ /**
+ * Parse files of a movie episode.
+ *
+ * @param {ElementHandle} qualityTrow Quality table-row ElementHandle.
+ * @param {ElementHandle} filesTRow Table-row ElementHandle containing download anchors.
+ * @returns {Array} Download file entries (quality, host, link).
+ */
+ async parseMovieEpisodeFiles(qualityTrow, filesTRow) {
+ const files = []
+ const quality = await Browser.getPlainProperty(qualityTrow, 'innerText')
+
+ const anchors = await filesTRow.$$('a')
+ await Util.asyncForEach(anchors, async anchor => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ files.push({
+ quality: quality,
+ host: host,
+ link: link
+ })
+ })
+
+ return files
+ }
}
-module.exports = Moenime
\ No newline at end of file
+module.exports = new Moenime
\ No newline at end of file
diff --git a/fansubs/Neonime.js b/fansubs/Neonime.js
new file mode 100644
index 0000000..dafd988
--- /dev/null
+++ b/fansubs/Neonime.js
@@ -0,0 +1,266 @@
+const Browser = require('../Browser')
+const Util = require('../utils/utils')
+const Handler = require('../exceptions/Handler')
+const { neonime_url } = require('../config.json')
+
+class Neonime {
+ /**
+ * Parse and get new released episodes.
+ */
+ async newReleases() {
+ const anime = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ await page.goto(neonime_url + '/episode/')
+
+ const tRows = await Browser.$$waitAndGet(page, 'table.list tbody > tr')
+ await Util.asyncForEach(tRows, async (trow) => {
+ const anchor = await trow.$('td.bb > a')
+ const text = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+ const epsSplit = text.split(' Episode ')
+ const episode = epsSplit[epsSplit.length - 1]
+ const numeral = episode.length == 1 ? '0' + episode : episode
+ const title = text.split(' Subtitle')[0]
+
+ anime.push({
+ episode: numeral,
+ title: title,
+ link: link.replace(neonime_url, ''),
+ raw_link: link
+ })
+ })
+
+
+ await page.close()
+
+ return anime
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse and get anime list.
+ */
+ async animeList() {
+ const animeList = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ await page.goto(neonime_url + '/list-anime/')
+
+ const anchors = await Browser.$$waitAndGet(page, '#az-slider a')
+ await Util.asyncForEach(anchors, async (anchor) => {
+ const title = await Browser.getPlainProperty(anchor, 'innerHTML')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+ const link = rawLink.replace(neonime_url, '')
+
+ animeList.push({
+ title: title.trim(),
+ link: link,
+ raw_link: rawLink,
+ is_batch: link.startsWith('/batch')
+ })
+ })
+
+ await page.close()
+
+ return animeList
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse tv show page and get episodes.
+ * @param link tv show page.
+ */
+ async episodes(link) {
+ const episodes = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+
+ if (link.startsWith('/batch')) {
+ return this.parseBatchLinks(link)
+ }
+
+ await page.goto(neonime_url + link)
+
+ await page.waitForSelector('div.episodiotitle')
+ const episodios = await page.$$('div.episodiotitle')
+ await Util.asyncForEach(episodios, async episodio => {
+ const anchor = await episodio.$('a')
+ const episode = await Browser.getPlainProperty(anchor, 'innerHTML')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+ const link = rawLink.replace(neonime_url, '')
+ const episodeMatches = episode.match(/([\d-]+)/g)
+ const numeral = episodeMatches[0].length == 1 ? '0' + episodeMatches[0] : episodeMatches[0]
+
+ episodes.push({
+ episode: numeral.trim(),
+ link: link,
+ raw_link: rawLink
+ })
+ })
+ await page.close()
+
+ return episodes
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse episode page and get download links.
+ * @param link episode page.
+ */
+ async links(link) {
+ link = decodeURIComponent(link)
+
+ if (link.startsWith('/batch')) {
+ return this.parseBatchLinks(link)
+ }
+
+ return this.parseLinks(link)
+ }
+
+ /**
+ * Parse episode page and get download links.
+ * @param link episode page.
+ */
+ async parseLinks(link) {
+ const links = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(neonime_url + link)
+
+ await page.waitForSelector('div.central > div > ul > ul')
+ const list = await page.$$('div > ul > ul')
+ await Util.asyncForEach(list, async item => {
+ const quality = await item.$eval('label', node => node.innerText)
+ const anchors = await item.$$('a')
+ await Util.asyncForEach(anchors, async anchor => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ if (link != neonime_url && !host.toLowerCase().includes('proses')) {
+ links.push({
+ quality: quality.trim(),
+ host: host,
+ link: link
+ })
+ }
+ })
+ })
+
+ await page.close()
+
+ return links
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse batch episode page and get download links.
+ * @param link episode page.
+ */
+ async parseBatchLinks(link) {
+ let isInfoOne = false
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(neonime_url + link)
+
+ await page.waitForSelector('.smokeurl').catch(e => {
+ Handler.error(e)
+ isInfoOne = true
+ })
+
+ const links = !isInfoOne ? await this.parseSmokeUrl(page) : await this.parseInfoOne(page)
+
+ await page.close()
+
+ return links
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse batch episode page that using info1 element.
+ * @param page episode page instance.
+ */
+ async parseInfoOne(page) {
+ const links = []
+ await page.waitForSelector('p[data-id="info1"]').catch(async e => {
+ await page.close()
+
+ return Handler.error(e)
+ })
+ const smokeurls = await page.$$('p[data-id="info1"]')
+ await Util.asyncForEach(smokeurls, async smokeurl => {
+ const strong = await smokeurl.$('strong')
+ if (strong && strong != null) {
+ const quality = await smokeurl.$eval('strong', node => node.innerText)
+ const anchors = await smokeurl.$$('a')
+ await Util.asyncForEach(anchors, async anchor => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ links.push({
+ quality: quality.trim(),
+ host: host,
+ link: link
+ })
+ })
+ }
+ })
+
+ return links
+ }
+
+ /**
+ * Parse batch episode page that using smokeurl element.
+ * @param page episode page instance.
+ */
+ async parseSmokeUrl(page) {
+ const links = []
+ const smokeurls = await page.$$('.smokeurl')
+ await Util.asyncForEach(smokeurls, async smokeurl => {
+ const quality = await smokeurl.$eval('strong', node => node.innerText)
+ const anchors = await smokeurl.$$('a')
+ await Util.asyncForEach(anchors, async anchor => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ links.push({
+ quality: quality.trim(),
+ host: host,
+ link: link
+ })
+ })
+ })
+
+ return links
+ }
+}
+
+module.exports = new Neonime
\ No newline at end of file
diff --git a/services/Oploverz.js b/fansubs/Oploverz.js
similarity index 56%
rename from services/Oploverz.js
rename to fansubs/Oploverz.js
index 0474d64..497a7e9 100644
--- a/services/Oploverz.js
+++ b/fansubs/Oploverz.js
@@ -1,18 +1,15 @@
+const Browser = require('../Browser')
const Util = require('../utils/utils')
const Handler = require('../exceptions/Handler')
const { oploverz_url } = require('../config.json')
class Oploverz {
- constructor(browser) {
- this.browser = browser
- }
-
/**
* Check on going page and get latest released episodes.
*/
- async checkOnGoingPage() {
+ async newReleases() {
const anime = []
- const page = await this.browser.newOptimizedPage()
+ const page = await Browser.newOptimizedPage()
try {
await page.setUserAgent('Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.83 Safari/537.1')
@@ -25,9 +22,9 @@ class Oploverz {
const list = await page.$$('#content > div.postbody > div.boxed > div.right > div.lts > ul > li')
await Util.asyncForEach(list, async item => {
const anchor = await item.$('div.dtl > h2 > a')
- const link = await this.browser.getPlainProperty(anchor, 'href')
- const title = await this.browser.getPlainProperty(anchor, 'innerText')
- const matchEps = link.match(/(\d+)(?=-subtitle-indonesia)/)
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+ const title = await Browser.getPlainProperty(anchor, 'innerText')
+ const matchEps = rawLink.match(/(\d+)(?=-subtitle-indonesia)/)
if (matchEps && matchEps != null) {
const numeral = matchEps[0].length == 1 ? '0' + matchEps[0] : matchEps[0]
const matchTitles = title.match(/(.+)(?= \d+)/, '')
@@ -36,7 +33,8 @@ class Oploverz {
anime.push({
episode: numeral,
title: parsedTitle,
- link: link
+ raw_link: rawLink,
+ link: rawLink.replace(oploverz_url, '')
})
}
}
@@ -52,22 +50,50 @@ class Oploverz {
}
}
+ /**
+ * Parse and get anime list.
+ */
+ async animeList() {
+ const animeList = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ await page.goto(oploverz_url + '/series/')
+
+ const anchors = await Browser.$$waitAndGet(page, 'div.postbody > .movlist> ul > li > a')
+ await Util.asyncForEach(anchors, async (anchor) => {
+ const title = await Browser.getPlainProperty(anchor, 'innerHTML')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+
+ animeList.push({
+ title: title,
+ link: rawLink.replace(oploverz_url, ''),
+ raw_link: rawLink
+ })
+ })
+
+ await page.close()
+
+ return animeList
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
/**
* Parse series page and get episode list.
* @param link series page.
*/
- async series(link) {
+ async episodes(link) {
const episodes = []
- const page = await this.browser.newOptimizedPage()
+ const page = await Browser.newOptimizedPage()
try {
link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
+ await page.goto(oploverz_url + link)
await Util.sleep(15000)
-
const list = await page.$$('#content > div.postbody > div > div.episodelist > ul > li')
await Util.asyncForEach(list, async (item, index) => {
if (index >= 30) {
@@ -75,12 +101,16 @@ class Oploverz {
}
const anchor = await item.$('span.leftoff > a')
- const episode = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
+ const episode = await Browser.getPlainProperty(anchor, 'innerText')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+ const link = rawLink.replace(oploverz_url, '')
+ const episodeMatches = episode.match(/([\d-]+)/g)
+ const numeral = episodeMatches[0].length == 1 ? '0' + episodeMatches[0] : episodeMatches[0]
episodes.push({
- episode: episode,
- link: link
+ episode: numeral,
+ link: link,
+ raw_link: rawLink
})
})
@@ -99,15 +129,13 @@ class Oploverz {
* Parse download links from episode page.
* @param link episode page.
*/
- async getDownloadLinks(link) {
- const page = await this.browser.newOptimizedPage()
+ async links(link) {
+ const page = await Browser.newOptimizedPage()
const downloadLinks = []
try {
link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
+ await page.goto(oploverz_url + link)
await Util.sleep(15000)
@@ -116,12 +144,13 @@ class Oploverz {
const sorattls = await soraddl.$$('div[class="sorattl title-download"]')
const soraurls = await soraddl.$$('div[class="soraurl list-download"]')
await Util.asyncForEach(soraurls, async (soraurl, index) => {
- let quality = await this.browser.getPlainProperty(sorattls[index], 'innerText')
- quality = quality.replace('oploverz – ', '')
+ let quality = await Browser.getPlainProperty(sorattls[index], 'innerText')
+ quality = this.parseQuality(quality)
+
const anchors = await soraurl.$$('a')
await Util.asyncForEach(anchors, async anchor => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
downloadLinks.push({
quality: quality,
@@ -142,41 +171,31 @@ class Oploverz {
}
}
- parseTravelling(link) {
- const params = Util.getAllUrlParams(link)
-
- return Util.base64Decode(params.r)
- }
-
- async hexa(link) {
- const page = await this.browser.newOptimizedPage()
- try {
- link = decodeURIComponent(link)
-
- if (link.includes('travellinginfos.com')) {
- link = this.parseTravelling(link)
- }
+ parseQuality(quality) {
+ let result = ''
- if (link.includes('kontenajaib.xyz')) {
- link = this.parseTravelling(link)
- }
-
- await page.goto(link, {
- timeout: 300000
- })
+ if (quality.match(/(x265)/i)) {
+ result += 'x265'
+ } else if (quality.match(/(MKV)/i)) {
+ result += 'MKV'
+ } else {
+ result += 'MP4'
+ }
- await Util.sleep(7000)
- const anchor = await page.$('center.link-content > a')
- const url = await this.browser.getPlainProperty(anchor, 'href')
- await page.close()
-
- return {url: url}
- } catch (error) {
- await page.close()
+ if (quality.match(/(1080p)/i)) {
+ result += ' 1080p'
+ } else if (quality.match(/(720p)/i)) {
+ result += ' 720p'
+ } else if (quality.match(/(480p)/i)) {
+ result += ' 480p'
+ }
- return Handler.error(error)
+ if (quality.match(/(10bit)/i)) {
+ result += ' 10bit'
}
+
+ return result
}
}
-module.exports = Oploverz
\ No newline at end of file
+module.exports = new Oploverz
\ No newline at end of file
diff --git a/fansubs/Samehadaku.js b/fansubs/Samehadaku.js
new file mode 100644
index 0000000..26290b1
--- /dev/null
+++ b/fansubs/Samehadaku.js
@@ -0,0 +1,190 @@
+const Browser = require('../Browser')
+const Util = require('../utils/utils')
+const Handler = require('../exceptions/Handler')
+const { samehadaku_url } = require('../config.json')
+
+class Samehadaku {
+ /**
+ * Parse and get new released episodes.
+ * @param navPage Navigation page.
+ */
+ async newReleases(navPage = 1) {
+ const episodes = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ await page.goto(`${samehadaku_url}/page/${navPage}/`)
+
+ const posts = await Browser.$$waitAndGet(page, 'div.white.updateanime > ul > li')
+ await Util.asyncForEach(posts, async (post) => {
+ const anchor = await post.$('h2.entry-title a')
+ const title = await Browser.getPlainProperty(anchor, 'innerText')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+
+ const parsedTitle = title.split(' Episode')[0]
+ const matches = rawLink.match(/(?<=episode-)(\d+)/)
+ if (matches && matches != null) {
+ const numeral = matches[0].length == 1 ? '0' + matches[0] : matches[0]
+
+ episodes.push({
+ episode: numeral,
+ title: parsedTitle,
+ link: rawLink.replace(samehadaku_url, ''),
+ raw_link: rawLink
+ })
+ }
+ })
+
+ await page.close()
+
+ return episodes
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse and get anime list.
+ */
+ async animeList() {
+ const animeList = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ await page.goto(samehadaku_url + '/daftar-anime/?list')
+
+ const anchors = await Browser.$$waitAndGet(page, 'div.daftarkartun a.tip')
+ await Util.asyncForEach(anchors, async (anchor) => {
+ const title = await Browser.getPlainProperty(anchor, 'innerHTML')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+
+ animeList.push({
+ title: title,
+ link: rawLink.replace(samehadaku_url, ''),
+ raw_link: rawLink
+ })
+ })
+
+ await page.close()
+
+ return animeList
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse tv show page and get episodes.
+ * @param link tv show page.
+ */
+ async episodes(link) {
+ const episodes = []
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ link = link.replace('/category/', '/anime/')
+ await page.goto(samehadaku_url + link)
+ const episodeList = await Browser.$$waitAndGet(page, 'div.episodelist > ul > li')
+ await Util.asyncForEach(episodeList, async (item) => {
+ const anchor = await item.$('span.lefttitle > a')
+ const episode = await Browser.getPlainProperty(anchor, 'innerText')
+ const rawLink = await Browser.getPlainProperty(anchor, 'href')
+ const link = rawLink.replace(samehadaku_url, '')
+ let numeral = episode
+
+ if (!link.match(/(opening)/) && !link.match(/(ending)/)) {
+ const episodeMatches = episode.match(/([\d-]+)/g)
+ const ovaMatches = link.match(/-ova/)
+ const ovaMatches2 = link.match(/ova-/)
+
+ if (episodeMatches && episodeMatches != null) {
+ numeral = episodeMatches[0].length == 1 ? '0' + episodeMatches[0] : episodeMatches[0]
+ } else if ((ovaMatches && ovaMatches != null) || (ovaMatches2 && ovaMatches2 != null)) {
+ numeral = episode
+ }
+
+ episodes.push({
+ episode: numeral,
+ link: link,
+ raw_link: rawLink
+ })
+ }
+ })
+ await page.close()
+
+ return episodes
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ /**
+ * Parse download links from episode page of a title.
+ * @param link episode page.
+ */
+ async links(link) {
+ const page = await Browser.newOptimizedPage()
+ const downloadLinks = []
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(samehadaku_url + link)
+
+ await page.waitForSelector('div.download-eps')
+ const downloadDivs = await page.$$('div.download-eps')
+ await Util.asyncForEach(downloadDivs, async downloadDiv => {
+ const p = await page.evaluateHandle(node => node.previousElementSibling, downloadDiv)
+ let format = await Browser.getPlainProperty(p, 'innerText')
+ format = format.replace('', '')
+ .replace('', '')
+ .replace(/(&)/, '')
+
+ if (format.match(/(3gp)/i)) {
+ return false
+ } else if (format.match(/(MKV)/i)) {
+ format = 'MKV'
+ } else if (format.match(/(265)/i)) {
+ format = 'x265'
+ } else if (format.match(/(MP4)/i)) {
+ format = 'MP4'
+ }
+
+ const list = await downloadDiv.$$('li')
+ await Util.asyncForEach(list, async item => {
+ const strong = await item.$('strong')
+ if (strong && strong != null) {
+ const quality = await Browser.getPlainProperty(strong, 'innerText')
+ const anchors = await item.$$('a')
+ await Util.asyncForEach(anchors, async anchor => {
+ const host = await Browser.getPlainProperty(anchor, 'innerText')
+ const link = await Browser.getPlainProperty(anchor, 'href')
+
+ downloadLinks.push({
+ quality: (`${format} ${quality}`).trim(),
+ host: host,
+ link: link
+ })
+ })
+ }
+ })
+ })
+
+ await page.close()
+
+ return downloadLinks
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Samehadaku
\ No newline at end of file
diff --git a/services/Kiryuu.js b/fantls/Kiryuu.js
similarity index 85%
rename from services/Kiryuu.js
rename to fantls/Kiryuu.js
index 1c3e488..fa840b7 100644
--- a/services/Kiryuu.js
+++ b/fantls/Kiryuu.js
@@ -1,18 +1,15 @@
+const Browser = require('../Browser')
const Util = require('../utils/utils')
const Handler = require('../exceptions/Handler')
const { kiryuu_url } = require('../config.json')
class Kiryuu {
- constructor(browser) {
- this.browser = browser
- }
-
/**
* Get manga list from manga list page.
*
*/
- async getMangaList() {
- const page = await this.browser.newOptimizedPage()
+ async mangaList() {
+ const page = await Browser.newOptimizedPage()
try {
await page.goto(kiryuu_url + '/manga/?list', {
@@ -24,12 +21,13 @@ class Kiryuu {
const soraList = await page.$('div.soralist')
const anchors = await soraList.$$('a.series')
await Util.asyncForEach(anchors, async (anchor) => {
- const title = await this.browser.getPlainProperty(anchor, 'innerHTML')
- const link = await this.browser.getPlainProperty(anchor, 'href')
+ const title = await Browser.getPlainProperty(anchor, 'innerHTML')
+ const link = await Browser.getPlainProperty(anchor, 'href')
mangaList.push({
title: title,
- link: link.replace(kiryuu_url, '')
+ link: link.replace(kiryuu_url, ''),
+ raw_link: link
})
})
@@ -48,8 +46,8 @@ class Kiryuu {
*
* @param {String} link Manga page url.
*/
- async getMangaInfo(link) {
- const page = await this.browser.newOptimizedPage()
+ async mangaInfo(link) {
+ const page = await Browser.newOptimizedPage()
let cover = null
try {
@@ -135,14 +133,8 @@ class Kiryuu {
*
* @param {String} link Manga page url.
*/
- async getChapters(link) {
- if (!this.browser)
- return {
- error: true,
- message: 'Browser not ready.'
- }
-
- const page = await this.browser.newOptimizedPage()
+ async chapters(link) {
+ const page = await Browser.newOptimizedPage()
try {
link = decodeURIComponent(kiryuu_url + link)
@@ -155,13 +147,14 @@ class Kiryuu {
const bixbox = await page.$('div.bixbox.bxcl')
const anchors = await bixbox.$$('span.lchx > a')
await Util.asyncForEach(anchors, async (anchor) => {
- const chapter = await this.browser.getPlainProperty(anchor, 'innerHTML')
- const link = await this.browser.getPlainProperty(anchor, 'href')
+ const chapter = await Browser.getPlainProperty(anchor, 'innerHTML')
+ const link = await Browser.getPlainProperty(anchor, 'href')
if (chapter && chapter !== '') {
chapters.push({
chapter: chapter.replace(/Chapter /gi, ''),
- link: link.replace(kiryuu_url, '')
+ link: link.replace(kiryuu_url, ''),
+ raw_link: link
})
}
})
@@ -181,14 +174,8 @@ class Kiryuu {
*
* @param {String} link Chapter page url.
*/
- async getImages(link) {
- if (!this.browser)
- return {
- error: true,
- message: 'Browser not ready.'
- }
-
- const page = await this.browser.newOptimizedPage()
+ async images(link) {
+ const page = await Browser.newOptimizedPage()
try {
link = decodeURIComponent(kiryuu_url + link)
@@ -212,11 +199,11 @@ class Kiryuu {
}
await Util.asyncForEach(imgs, async (img, index) => {
- const src = await this.browser.getPlainProperty(img, 'src')
+ const src = await Browser.getPlainProperty(img, 'src')
images.push({
index: index,
- link: src
+ url: src
})
})
@@ -237,8 +224,8 @@ class Kiryuu {
* Get new releases from home page.
*
*/
- async getNewReleases() {
- const page = await this.browser.newOptimizedPage()
+ async newReleases() {
+ const page = await Browser.newOptimizedPage()
try {
await page.goto(kiryuu_url, {
@@ -268,8 +255,10 @@ class Kiryuu {
releases.push({
title: title,
title_url: titleLink.replace(kiryuu_url, ''),
+ raw_title_url: titleLink,
chapter: chapter.replace(/Ch. |Ch./gi, ''),
- chapter_url: chapterLink.replace(kiryuu_url, '')
+ chapter_url: chapterLink.replace(kiryuu_url, ''),
+ raw_chapter_url: chapterLink
})
})
@@ -284,4 +273,4 @@ class Kiryuu {
}
}
-module.exports = Kiryuu
\ No newline at end of file
+module.exports = new Kiryuu
\ No newline at end of file
diff --git a/index.js b/index.js
index 6e0a700..efbb5b8 100644
--- a/index.js
+++ b/index.js
@@ -1,7 +1,7 @@
const express = require('express')
const routes = require('./routes')
const app = express()
-const Browser = require('./services/Browser')
+const Browser = require('./Browser')
const { app_port } = require('./config.json')
const runningPort = process.env.PORT || app_port
diff --git a/package.json b/package.json
index 9aeeefe..a1116e5 100644
--- a/package.json
+++ b/package.json
@@ -1,17 +1,18 @@
{
"name": "shallty",
- "version": "1.5.5",
+ "version": "2.0.0",
"description": "Shallty adalah aplikasi untuk meng-crawl situs fastsub/fanshare Indonesia. Tujuan utamanya adalah untuk melewati berbagai halaman redirect dan mengambil tautan unduh aslinya. Saat ini Shallty telah mendukung crawling untuk Meownime, Samehadaku, Neonime, dan Oploverz. https://shallty.kyuun.id",
"main": "index.js",
"scripts": {
+ "start": "node index.js",
"test": "node ./node_modules/mocha/bin/mocha --recursive --exit",
"unit-test": "node ./node_modules/mocha/bin/mocha --recursive --exit ./test/unit/",
"int-test": "node ./node_modules/mocha/bin/mocha --recursive --exit ./test/integration/",
- "f-unit-test": "node ./node_modules/mocha/bin/mocha --recursive --exit ./test/unit/ --grep",
- "f-int-test": "node ./node_modules/mocha/bin/mocha --recursive --exit ./test/integration/ --grep"
+ "unit-test-filtered": "node ./node_modules/mocha/bin/mocha --recursive --exit ./test/unit/ --grep",
+ "int-test-filtered": "node ./node_modules/mocha/bin/mocha --recursive --exit ./test/integration/ --grep"
},
"author": "gegehprast98",
- "license": "GPL-3.0-only",
+ "license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/gegehprast/shallty.git"
diff --git a/routes/index.js b/routes/index.js
index 977da60..2adb909 100644
--- a/routes/index.js
+++ b/routes/index.js
@@ -1,43 +1,35 @@
const express = require('express')
const route = express.Router()
const SamehadakuController = require('../controllers/SamehadakuController')
-const MeownimeController = require('../controllers/MeownimeController')
const NeonimeController = require('../controllers/NeonimeController')
const OploverzController = require('../controllers/OploverzController')
const KusonimeController = require('../controllers/KusonimeController')
const KiryuuController = require('../controllers/KiryuuController')
const MoenimeController = require('../controllers/MoenimeController')
+const ShortlinkController = require('../controllers/ShortlinkController')
+
+route.get('/samehadaku/animeList', SamehadakuController.animeList)
+route.get('/samehadaku/episodes', SamehadakuController.episodes)
+route.get('/samehadaku/links', SamehadakuController.links)
+route.get('/samehadaku/newReleases', SamehadakuController.newReleases)
-route.get('/meownime/anime', MeownimeController.anime)
-route.get('/meownime/movie', MeownimeController.movie)
-route.get('/meownime/davinsurance', MeownimeController.davinsurance)
-route.get('/meownime/meowbox', MeownimeController.meowbox)
-route.get('/meownime/meowdrive', MeownimeController.meowdrive)
-route.get('/meownime/checkOnGoingPage', MeownimeController.checkOnGoingPage)
-route.get('/meownime/onGoingAnime', MeownimeController.onGoingAnime)
-
-route.get('/samehadaku/anime', SamehadakuController.anime)
-route.get('/samehadaku/checkOnGoingPage', SamehadakuController.checkOnGoingPage)
-route.get('/samehadaku/getDownloadLinks', SamehadakuController.getDownloadLinks)
-route.get('/samehadaku/tetew', SamehadakuController.tetew)
-route.get('/samehadaku/njiir', SamehadakuController.njiir)
-
-route.get('/neonime/checkOnGoingPage', NeonimeController.checkOnGoingPage)
route.get('/neonime/animeList', NeonimeController.animeList)
-route.get('/neonime/tvShow', NeonimeController.tvShow)
-route.get('/neonime/getEpisodes', NeonimeController.getEpisodes) // including download links
-route.get('/neonime/hightech', NeonimeController.hightech)
-route.get('/neonime/getBatchEpisodes', NeonimeController.getBatchEpisodes) // including download links
+route.get('/neonime/episodes', NeonimeController.episodes)
+route.get('/neonime/links', NeonimeController.links)
+route.get('/neonime/newReleases', NeonimeController.newReleases)
-route.get('/oploverz/checkOnGoingPage', OploverzController.checkOnGoingPage)
-route.get('/oploverz/series', OploverzController.series)
-route.get('/oploverz/getDownloadLinks', OploverzController.getDownloadLinks)
-route.get('/oploverz/hexa', OploverzController.hexa)
+route.get('/oploverz/animeList', OploverzController.animeList)
+route.get('/oploverz/episodes', OploverzController.episodes)
+route.get('/oploverz/links', OploverzController.links)
+route.get('/oploverz/newReleases', OploverzController.newReleases)
-route.get('/kusonime/homePage', KusonimeController.homePage)
route.get('/kusonime/animeList', KusonimeController.animeList)
-route.get('/kusonime/getDownloadLinks', KusonimeController.getDownloadLinks)
-route.get('/kusonime/semrawut', KusonimeController.semrawut)
+route.get('/kusonime/links', KusonimeController.links)
+route.get('/kusonime/newReleases', KusonimeController.newReleases)
+
+route.get('/moenime/animeList', MoenimeController.animeList)
+route.get('/moenime/links', MoenimeController.links)
+route.get('/moenime/newReleases', MoenimeController.newReleases)
route.get('/kiryuu/mangaList', KiryuuController.mangaList)
route.get('/kiryuu/mangaInfo', KiryuuController.mangaInfo)
@@ -45,9 +37,6 @@ route.get('/kiryuu/chapters', KiryuuController.chapters)
route.get('/kiryuu/images', KiryuuController.images)
route.get('/kiryuu/newReleases', KiryuuController.newReleases)
-route.get('/moenime/animeList', MoenimeController.animeList)
-route.get('/moenime/episodes', MoenimeController.episodes)
-route.get('/moenime/newReleases', MoenimeController.newReleases)
-route.get('/moenime/teknoku', MoenimeController.teknoku)
+route.get('/shortlink', ShortlinkController.index)
module.exports = route
\ No newline at end of file
diff --git a/services/Kusonime.js b/services/Kusonime.js
deleted file mode 100644
index 05f0845..0000000
--- a/services/Kusonime.js
+++ /dev/null
@@ -1,294 +0,0 @@
-const Util = require('../utils/utils')
-const Handler = require('../exceptions/Handler')
-const { kusonime_url } = require('../config.json')
-
-class Kusonime {
- constructor(browser) {
- this.browser = browser
- }
-
- /**
- * Parse and get anime list. Currently support only up to page 2.
- */
- async animeList() {
- const page = await this.browser.newOptimizedPage()
-
- try {
- let animeList = []
- for (let i = 1; i < 3; i++) {
- await page.goto(`${kusonime_url}/anime-list-batch/${i > 1 ? `page/${i}/`: ''}`, {
- timeout: 300000
- })
-
- await page.waitForSelector('a.kmz')
- const list = await page.$$eval('a.kmz', nodes => nodes.map(x => ({
- link: x.href,
- title: x.innerText
- })))
-
- animeList = animeList.concat(list)
- }
- await page.close()
-
- return animeList
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse home page and get post list.
- *
- * @param {Number} homePage Home page.
- */
- async homePage(homePage = 1) {
- const page = await this.browser.newOptimizedPage()
- const posts = []
-
- try {
- await page.goto(`${kusonime_url}/page/${homePage}`, {
- timeout: 300000
- })
-
- await page.waitForSelector('div.venz')
- const kovers = await page.$$('div.venz > ul > div.kover')
-
- await Util.asyncForEach(kovers, async (kover) => {
- const anchor = await kover.$('.episodeye > a')
- const title = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- posts.push({
- link: link,
- title: title
- })
- })
-
- await page.close()
-
- return posts
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse download links from smokeurls div.
- *
- * @param smokeurls ElementHandle.
- * @param {String} episodeTitle Episode title.
- */
- async parseSmokeurl(smokeurls, episodeTitle) {
- const downloadLinks = []
- await Util.asyncForEach(smokeurls, async (smokeurl) => {
- const anchors = await smokeurl.$$('a')
- const strong = await smokeurl.$('strong')
- if (typeof strong == 'undefined' || !strong) {
- return false
- }
-
- const quality = await this.browser.getPlainProperty(strong, 'innerText')
-
- await Util.asyncForEach(anchors, async (anchor) => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- const episode = {
- 'episode': episodeTitle,
- 'quality': quality,
- 'host': host,
- 'link': link
- }
-
- downloadLinks.push(episode)
- })
- })
-
- return downloadLinks
- }
-
- /**
- * Parse download links from episode page of a title that does not have smokeddl div.
- *
- * @param smokeddls dlbod ElementHandle.
- */
- async parseSmokeddl(smokeddls) {
- let downloadLinks = []
- await Util.asyncForEach(smokeddls, async (smokeddl) => {
- let smokettl = await smokeddl.$('div.smokettl')
- if (typeof smokettl == 'undefined' || !smokettl) {
- smokettl = await smokeddl.$('.smokeurl:nth-child(1)')
- }
- const episodeTitle = await this.browser.getPlainProperty(smokettl, 'innerText')
- const smokeurls = await smokeddl.$$('div.smokeurl')
- const newDownloadLinks = await this.parseSmokeurl(smokeurls, episodeTitle)
- downloadLinks = downloadLinks.concat(newDownloadLinks)
- })
-
- return downloadLinks
- }
-
- /**
- * Parse download links from episode page of a title that does not have smokeddl div.
- *
- * @param dlbod dlbod ElementHandle.
- */
- async parseZeroSmodeddl(dlbod) {
- let smokettl = await dlbod.$('div.smokettl')
- if (typeof smokettl == 'undefined' || !smokettl) {
- smokettl = await dlbod.$('.smokeurl:nth-child(1)')
- }
- const episodeTitle = await this.browser.getPlainProperty(smokettl, 'innerText')
- const smokeurls = await dlbod.$$('div.smokeurl')
- const downloadLinks = await this.parseSmokeurl(smokeurls, episodeTitle)
-
- return downloadLinks
- }
-
- /**
- * Parse download links from episode page of a title.
- *
- * @param {String} link Episode page url.
- */
- async getDownloadLinks(link) {
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
- const dlbod = await this.browser.waitAndGetSelector(page, 'div.dlbod')
- const smokeddls = await dlbod.$$('div.smokeddl')
- const info = await page.$('div.info > p:nth-child(6)')
- const status = await this.browser.getPlainProperty(info, 'innerText')
- const downloadLinks = smokeddls.length > 0 ? await this.parseSmokeddl(smokeddls) : await this.parseZeroSmodeddl(dlbod)
-
- await page.close()
-
- return {
- status: (status && status == 'Status: Completed') ? 'completed' : 'airing',
- links: downloadLinks
- }
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse kepoow and get original download link.
- *
- * @param {String} link kepoow url.
- */
- parseKepoow(params) {
- return {
- url: Util.base64Decode(params.r)
- }
- }
-
- /**
- * Parse sukakesehattan and get original download link.
- *
- * @param {String} link sukakesehattan url.
- */
- parseSukakesehattan(params) {
- return {
- url: params.url
- }
- }
-
- /**
- * Parse jelajahinternet and get original download link.
- *
- * @param {String} link jelajahinternet url.
- */
- parseJelajahinternet(params) {
- return {
- url: params.url
- }
- }
-
- async waitGetLinkElementToShowUp(downloadButton) {
- let classProp = await this.browser.getPlainProperty(downloadButton, 'className')
- do {
- await Util.sleep(5000)
- classProp = await this.browser.getPlainProperty(downloadButton, 'className')
- console.log(classProp)
- } while (classProp !== 'get-link')
-
- return true
- }
-
- async parseSemawur(link) {
- const page = await this.newOptimizedPageWithNewContext()
-
- try {
- await page.goto(link, {
- timeout: 300000
- })
-
- await page.waitForSelector('#link-view > button')
- await Promise.all([
- page.waitForNavigation({
- timeout: 0,
- waitUntil: 'networkidle2'
- }),
- page.click('#link-view > button')
- ])
- await page.waitForSelector('a.get-link')
- await Util.sleep(5000)
- const downloadButton = await page.$('a.get-link')
- await this.waitGetLinkElementToShowUp(downloadButton)
- const downloadLinks = await this.browser.getPlainProperty(downloadButton, 'href')
-
- await this.browser.closePage(page)
-
- return {
- url: downloadLinks
- }
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Proceed semawur to get original download link.
- *
- * @param {String} link URL decoded semawur url.
- */
- async semrawut(link) {
- link = decodeURIComponent(link)
- const params = Util.getAllUrlParams(link)
-
- if (link.includes('kepoow.me')) {
- return this.parseKepoow(params)
- }
-
- if (link.includes('sukakesehattan.')) {
- return this.parseSukakesehattan(params)
- }
-
- if (link.includes('jelajahinternet.')) {
- return this.parseJelajahinternet(params)
- }
-
- if (Object.entries(params).length > 0 && params.url) {
- return {
- url: decodeURIComponent(params.url).replace(/\++/g, ' ')
- }
- }
-
- return await this.parseSemawur(link)
- }
-}
-
-module.exports = Kusonime
\ No newline at end of file
diff --git a/services/Meownime.js b/services/Meownime.js
deleted file mode 100644
index 5de2541..0000000
--- a/services/Meownime.js
+++ /dev/null
@@ -1,374 +0,0 @@
-const Util = require('../utils/utils')
-const Handler = require('../exceptions/Handler')
-const { meownime_url } = require('../config.json')
-
-class Meownime {
- constructor(browser) {
- this.browser = browser
- }
-
- /**
- * Parse episodes from completed anime.
- * @param link anime page.
- */
- async getEpisodes(link) {
- const page = await this.browser.newOptimizedPage()
- const episodes = new Map
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
- const dlLinks = await page.$$('article > div > div > div.meow-dl-link')
-
- await Util.asyncForEach(dlLinks, async (dlLink) => {
- const quality = await dlLink.$eval('.tombol', node => node.innerText)
- const episodeDivs = await dlLink.$$('div.isi-dl > table > tbody > tr:not([bgcolor="#eee"])')
- const dlLinkDivs = await dlLink.$$('div.isi-dl > table > tbody > tr[bgcolor="#eee"]')
-
- await Util.asyncForEach(episodeDivs, async (episodeDiv, index) => {
- let alpha, size
- const episode = await episodeDiv.$eval('td', node => node.innerText)
- if (!episode.toLowerCase().includes('batch')) {
- const episodeArr = episode.split(' — ')
- alpha = episodeArr[0]
- size = episodeArr[1]
- } else {
- const episodeArr = episode.split(' | ')
- alpha = episodeArr[0]
- size = episodeArr[1]
- }
-
- const fileHosts = await dlLinkDivs[index].$$eval('a', nodes => nodes.map(n => {
- return {
- host: n.innerText,
- link: n.href
- }
- }))
-
- if (!episodes.has(alpha)) {
- episodes.set(alpha, [])
- }
- const epAlpha = episodes.get(alpha)
- epAlpha.push({
- quality: `${quality} - ${size}`,
- fileHosts: fileHosts
- })
- episodes.set(alpha, epAlpha)
- })
- })
-
- await page.close()
-
- return Array.from(episodes)
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse davinsurance and get the final url such as zippy, meowfiles, meowbox, meowcloud, meowdrive, etc.
- * @param link davinsurance page.
- */
- async davinsurance(link) {
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
- await Promise.all([
- page.waitForNavigation({
- timeout: 0,
- waitUntil: 'networkidle2'
- }),
- page.click('#srl > form > input.sorasubmit'),
- ])
-
- const fullContent = await page.content()
- await page.close()
-
- // eslint-disable-next-line quotes
- let splitted = fullContent.split("function changeLink(){var a='")
- splitted = splitted[1].split(';window.open')
- splitted = splitted[0].replace(/(['"])+/g, '')
-
- return {url: splitted}
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse meowbox and get the final url such as google drive.
- * Sometimes will return meowbox url if something wrong happens.
- * @param link meowbox page.
- */
- async meowbox(link) {
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
- try {
- await page.waitForNavigation({
- timeout: 10000,
- waitUntil: 'domcontentloaded'
- })
- } catch (error) {
- console.log(error)
- }
-
- const currentUrl = page.url()
- if (currentUrl.includes('login.php')) {
- await page.waitForSelector('#inputEmail')
- await page.type('#inputEmail', 'shalltyanime', {
- delay: 100
- })
-
- await page.waitForSelector('#inputPassword')
- await page.type('#inputPassword', '7bmAyN6XWHnzwRF', {
- delay: 100
- })
-
- await page.waitForSelector('#Login > button')
- await Promise.all([
- page.waitForNavigation({
- timeout: 0,
- waitUntil: 'networkidle2'
- }),
- page.click('#Login > button'),
- page.waitForNavigation({
- timeout: 0,
- waitUntil: 'networkidle2'
- }),
- ])
- }
-
- await page.waitForSelector('#page-top > header > div > form > button.download')
- await Promise.all([
- page.waitForNavigation({
- timeout: 0,
- waitUntil: 'networkidle2'
- }),
- page.click('#page-top > header > div > form > button.download'),
- ])
-
- await Util.sleep(5000)
-
- const finalUrl = page.url()
- await page.close()
-
- return {url: finalUrl}
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse meowdrive or meowcloud and get the final url such as google drive.
- * Sometimes will return meowbox url if something wrong happens.
- * @param link meowdrive or meowcloud page.
- */
- async meowdrive(link) {
- let finalUrl
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
- await page.waitForSelector('#ddl > ul > li:nth-child(2) > a')
- await Promise.all([
- page.waitForNavigation({
- timeout: 0,
- waitUntil: 'networkidle2'
- }),
- page.click('#ddl > ul > li:nth-child(2) > a')
- ])
-
- const currentUrl = page.url()
- if (currentUrl.includes('meowbox')) {
- const meowboxLink = encodeURI(currentUrl)
- const { url } = await this.meowbox(meowboxLink)
- finalUrl = url
- } else {
- finalUrl = currentUrl
- }
-
- await page.close()
-
- return {url: finalUrl}
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Get all title from on going page.
- */
- async checkOnGoingPage() {
- const anime = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- await page.goto(meownime_url + '/tag/ongoing/', {
- timeout: 300000
- })
-
- await page.waitForSelector('article')
- const articles = await page.$$('article')
- await Util.asyncForEach(articles, async (article, index) => {
- const episode = await article.$eval('div > div.postedon', node => node.innerText)
- const info = await article.$eval('div > div.out-thumb > h1 > a', node => {
- return {title: node.innerText, link: node.href}
- })
- // remove meownime url and trailing slash
- let link = info.link.replace(meownime_url, '').replace(/\/+$/, '')
- anime[index] = {
- episode: episode.split(' ')[1],
- title: info.title,
- link: link
- }
- })
-
- await page.close()
-
- return anime
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse episodes from on going anime.
- * @param link anime page.
- */
- async onGoingAnime(link) {
- const episodes = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
- await page.waitForSelector('tr[bgcolor="#eee"]')
- const tRowsHandle = await page.$$('tr[bgcolor="#eee"]')
- await Util.asyncForEach(tRowsHandle, async tRowHandle => {
- // search for previous sibling table element
- let tableHandle = await page.evaluateHandle(tRow => {
- return tRow.parentElement.previousElementSibling
- }, tRowHandle)
- // search again if table element is null
- if (tableHandle.asElement() == null) {
- tableHandle = await page.evaluateHandle(tRow => {
- return tRow.parentElement.parentElement.previousElementSibling
- }, tRowHandle)
- }
-
- try {
- let episode = await tableHandle.$eval('center', node => node.innerText)
- const matches = episode.match(/Episode ([0-9])+/g)
- if (matches && matches != null) {
- const episodeAlpha = matches[0]
- const episodeNumeral = episodeAlpha.split(' ')[1].length == 1 ?
- '0' + episodeAlpha.split(' ')[1] :
- episodeAlpha.split(' ')[1]
- const qualityHandle = await page.evaluateHandle(tRow => tRow.previousElementSibling, tRowHandle)
- const quality = await (await qualityHandle.getProperty('innerText')).jsonValue()
-
- const anchorsHandle = await tRowHandle.$$('a')
- await Util.asyncForEach(anchorsHandle, async anchorHandle => {
- const host = await (await anchorHandle.getProperty('innerText')).jsonValue()
- const link = await (await anchorHandle.getProperty('href')).jsonValue()
-
- episodes.push({
- episodeAlpha: episodeAlpha,
- episodeNumeral: episodeNumeral,
- quality: quality,
- host: host,
- link: link
- })
- })
- }
- } catch (error) {
- console.log(error)
- }
- })
-
- await page.close()
-
- return episodes
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse episodes from movie anime.
- * @param link anime page.
- */
- async getMovieEpisodes(link) {
- const page = await this.browser.newOptimizedPage()
- const episodes = []
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 300000
- })
-
- await page.waitForSelector('table[class=" table table-hover"]:not(style)')
- const tables = await page.$$('table[class=" table table-hover"]:not(style)')
- await Util.asyncForEach(tables, async table => {
- const tRows = await table.$$('tr')
- if (tRows.length > 1) {
- const quality = await table.$eval('tr', node => node.innerText)
- const downloadLinks = await tRows[1].$$eval('a', nodes => nodes.map(n => {
- return {
- host: n.innerText,
- link: n.href
- }
- }))
- episodes.push({
- quality: quality,
- downloadLinks: downloadLinks
- })
- }
- })
-
- await page.close()
-
- return episodes
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-}
-
-module.exports = Meownime
\ No newline at end of file
diff --git a/services/Neonime.js b/services/Neonime.js
deleted file mode 100644
index 15ef86b..0000000
--- a/services/Neonime.js
+++ /dev/null
@@ -1,293 +0,0 @@
-const Util = require('../utils/utils')
-const Handler = require('../exceptions/Handler')
-const { neonime_url } = require('../config.json')
-
-class Neonime {
- constructor(browser) {
- this.browser = browser
- }
-
- /**
- * Get new tab page instance.
- * @param page current page.
- * @param browser current browser.
- */
- async newPagePromise(page, browser) {
- const pageTarget = page.target()
- const newTarget = await browser.waitForTarget(target => target.opener() === pageTarget)
- const newPage = await newTarget.page()
-
- return newPage
- }
-
- /**
- * Parse and get anime list.
- */
- async checkOnGoingPage() {
- const anime = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- await page.goto(neonime_url + '/episode/', {
- timeout: 60000
- })
-
- await page.waitForSelector('table.list')
- const table = await page.$('table.list')
- const tRows = await table.$$('tbody > tr')
- await Util.asyncForEach(tRows, async trow => {
- const anchor = await trow.$('td.bb > a')
- const text = await this.browser.getPlainProperty(anchor, 'innerText')
- const episodeSplit = text.split(' Episode ')
- const titleSplit = text.split(' Subtitle')
- const episode = episodeSplit[episodeSplit.length - 1]
- const title = titleSplit[0]
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- anime.push({
- episode: episode,
- title: title,
- link: link
- })
- })
-
-
- await page.close()
-
- return anime
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse and get anime list.
- */
- async animeList() {
- const page = await this.browser.newOptimizedPage()
-
- try {
- await page.goto(neonime_url + '/list-anime/', {
- timeout: 60000
- })
-
- await page.waitForSelector('#az-slider')
- const slider = await page.$('#az-slider')
- const animeList = await slider.$$eval('a', nodes => nodes.map(x => {
- const title = x.innerText
- const link = x.href
-
- return {link: link, title: title}
- }))
- await page.close()
-
- return animeList
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse tv show page and get episodes.
- * @param link tv show page.
- */
- async tvShow(link) {
- const episodes = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 60000
- })
-
- await page.waitForSelector('div.episodiotitle')
- const episodios = await page.$$('div.episodiotitle')
- await Util.asyncForEach(episodios, async episodio => {
- const { episode, link } = await episodio.$eval('a', node => (
- {
- episode: node.innerText,
- link: node.href
- }
- ))
-
- episodes.push({
- episode: episode,
- link: link
- })
- })
- await page.close()
-
- return episodes
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse episode page and get download links.
- * @param link episode page.
- */
- async getEpisodes(link) {
- const episodes = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 60000
- })
-
- await page.waitForSelector('div.central > div > ul > ul')
- const list = await page.$$('div > ul > ul')
- await Util.asyncForEach(list, async item => {
- const quality = await item.$eval('label', node => node.innerText)
- const anchors = await item.$$('a')
- await Util.asyncForEach(anchors, async anchor => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- if (link != neonime_url && !host.toLowerCase().includes('proses')) {
- episodes.push({
- quality: quality,
- host: host,
- link: link
- })
- }
- })
- })
-
- await page.close()
-
- return episodes
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse batch episode page and get download links.
- * @param link episode page.
- */
- async getBatchEpisodes(link) {
- const episodes = []
- let info1 = false
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 60000
- })
-
-
- await page.waitForSelector('.smokeurl').catch(e => {
- Handler.error(e)
- info1 = true
- })
-
- if (!info1) {
- const smokeurls = await page.$$('.smokeurl')
- await Util.asyncForEach(smokeurls, async smokeurl => {
- const quality = await smokeurl.$eval('strong', node => node.innerText)
- const anchors = await smokeurl.$$('a')
- await Util.asyncForEach(anchors, async anchor => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- episodes.push({
- quality: quality,
- host: host,
- link: link
- })
- })
- })
- } else {
- await page.waitForSelector('p[data-id="info1"]').catch(async e => {
- await page.close()
-
- return Handler.error(e)
- })
- const smokeurls = await page.$$('p[data-id="info1"]')
- await Util.asyncForEach(smokeurls, async smokeurl => {
- const strong = await smokeurl.$('strong')
- if (strong && strong != null) {
- const quality = await smokeurl.$eval('strong', node => node.innerText)
- const anchors = await smokeurl.$$('a')
- await Util.asyncForEach(anchors, async anchor => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- episodes.push({
- quality: quality,
- host: host,
- link: link
- })
- })
- }
- })
- }
-
-
-
- await page.close()
-
- return episodes
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse high tech.
- * @param link anime page.
- */
- async hightech(link) {
- link = decodeURIComponent(link)
- const params = Util.getAllUrlParams(link)
- if (params.sitex) {
- return {
- url: Util.base64Decode(params.sitex)
- }
- }
-
- const page = await this.browser.newOptimizedPage()
-
- try {
- await page.goto(link, {
- timeout: 60000
- })
-
- await Util.sleep(6000)
- await page.waitForSelector('a[href="#generate"]')
- await page.click('a[href="#generate"]')
- await page.waitForSelector('a#link-download')
- await Util.sleep(3000)
- await page.click('a#link-download')
-
- const newPage = await this.newPagePromise(page, this.browser.browser)
- const url = newPage.url()
-
- await page.close()
- await newPage.close()
-
- return {url: url}
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-}
-
-module.exports = Neonime
\ No newline at end of file
diff --git a/services/Samehadaku.js b/services/Samehadaku.js
deleted file mode 100644
index 4bd434e..0000000
--- a/services/Samehadaku.js
+++ /dev/null
@@ -1,411 +0,0 @@
-const Util = require('../utils/utils')
-const Handler = require('../exceptions/Handler')
-const {
- samehadaku_url,
- samehadaku_magBoxContainer
-} = require('../config.json')
-
-class Samehadaku {
- constructor(browser) {
- this.browser = browser
- }
-
- /**
- * Parse and get episode information from a post element handler.
- * @param post post element handler.
- */
- async parsePostElement(post) {
- const { title, postLink } = await post.$eval('a', node => ({
- title: node.innerText,
- postLink: node.href
- }))
- if (!postLink.match(/(opening)/) && !postLink.match(/(ending)/)) {
- // const matches = postLink.match(/(?<=episode-)(\d+)(?=-subtitle-indonesia)/)
- const matches = postLink.match(/(?<=episode-)(\d+)/)
- if (matches && matches != null) {
- const numeral = matches[0].length == 1 ? '0' + matches[0] : matches[0]
-
- return {
- episode: numeral,
- title: title,
- link: postLink
- }
- }
- }
-
- return null
- }
-
- /**
- * Parse and get episodes from a category/label page.
- * @param link category/label page.
- */
- async getEpisodes(link) {
- let totalPage
- const pageLimit = 3
- const episodes = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 30000
- })
-
- try {
- await page.waitForSelector('#content > div > div > div.pages-nav')
- const pageNav = await page.$('#content > div > div > div.pages-nav')
- let lastPage = await pageNav.$('li.last-page')
- if (!lastPage) {
- lastPage = await pageNav.$$('li:not([class="the-next-page"])')
- lastPage = lastPage[lastPage.length - 1]
- }
- const lastPageLink = await lastPage.$eval('a', node => node.href)
- totalPage = lastPageLink.replace(/\/+$/, '').split('/')
- totalPage = parseInt(totalPage[totalPage.length - 1])
- totalPage = totalPage > pageLimit ? pageLimit : totalPage
- } catch (error) {
- Handler.error(error)
- totalPage = 1
- }
-
-
- const postContainer = await page.$('ul#posts-container')
- const posts = await postContainer.$$('h3.post-title')
- await Util.asyncForEach(posts, async post => {
- const parsedEpisode = await this.parsePostElement(post)
- if (parsedEpisode)
- episodes.push(parsedEpisode)
- })
-
- for (let i = 2; i <= totalPage; i++) {
- await page.goto(link.replace(/\/+$/, '') + `/page/${i}`, {
- timeout: 30000
- })
- await page.waitForSelector('ul#posts-container')
- const postContainer = await page.$('ul#posts-container')
- const posts = await postContainer.$$('h3.post-title')
- await Util.asyncForEach(posts, async post => {
- const parsedEpisode = await this.parsePostElement(post)
- if (parsedEpisode)
- episodes.push(parsedEpisode)
- })
- }
-
- await page.close()
-
- return episodes
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Get all title from on going page.
- */
- async checkOnGoingPage() {
- const anime = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- await page.goto(samehadaku_url, {
- timeout: 30000
- })
-
- await page.waitForSelector('.mag-box-container')
- const magBoxContainer = await page.$$('.mag-box-container')
- const container = magBoxContainer[samehadaku_magBoxContainer]
- const posts = await container.$$('li[class="post-item tie-standard"]')
-
- await Util.asyncForEach(posts, async (post) => {
- const titleHeader = await post.$('h3.post-title')
- const { title, link } = await titleHeader.$eval('a', node => ({
- title: node.innerText,
- link: node.href
- }))
- const parsedTitle = title.split(' Episode')[0]
- // const matches = link.match(/(?<=episode-)(\d+)(?=-subtitle-indonesia)/)
- const matches = link.match(/(?<=episode-)(\d+)/)
- if (matches && matches != null) {
- const numeral = matches[0].length == 1 ? '0' + matches[0] : matches[0]
-
- anime.push({
- episode: numeral,
- title: parsedTitle,
- link: link
- })
- }
- })
-
- await page.close()
-
- return anime
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse download links from episode page of a title.
- * @param link episode page.
- */
- async getDownloadLinks(link) {
- const page = await this.browser.newOptimizedPage()
- const downloadLinks = []
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 30000
- })
-
- await page.waitForSelector('div.download-eps')
- const downloadDivs = await page.$$('div.download-eps')
- await Util.asyncForEach(downloadDivs, async downloadDiv => {
- const p = await page.evaluateHandle(node => node.previousElementSibling, downloadDiv)
- let format = await this.browser.getPlainProperty(p, 'innerText')
- format = format.replace('', '')
- .replace('', '')
- .replace(/(&)/, '')
-
- if (format.match(/(3gp)/i)) {
- return false
- } else if (format.match(/(MKV)/i)) {
- format = 'MKV'
- } else if (format.match(/(265)/i)) {
- format = 'x265'
- } else if (format.match(/(MP4)/i)) {
- format = 'MP4'
- }
-
- const list = await downloadDiv.$$('li')
- await Util.asyncForEach(list, async item => {
- const strong = await item.$('strong')
- if (strong && strong != null) {
- const quality = await this.browser.getPlainProperty(strong, 'innerText')
- const anchors = await item.$$('a')
- await Util.asyncForEach(anchors, async anchor => {
- const host = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- downloadLinks.push({
- quality: `${format} ${quality}`,
- host: host,
- link: link
- })
- })
- }
- })
- })
-
- await page.close()
-
- return downloadLinks
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- async parseTetewBase64UrlParam(untetewed) {
- const queries = Util.getAllUrlParams(untetewed)
- if (queries.r) {
- return {
- url: Util.base64Decode(queries.r)
- }
- }
- return {
- url: untetewed
- }
- }
-
- /**
- * Parse tetew and get the final url.
- * @param link tetew url.
- */
- async tetew(link, skip = false) {
- let final
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 30000
- })
-
- await page.waitForSelector('div.download-link')
- const div = await page.$('div.download-link')
- const untetewed = await div.$eval('a', node => node.href)
-
- if (skip) {
- await page.close()
- return this.parseTetewBase64UrlParam(untetewed)
- }
-
- // njiir
- const unjiired = await this.njiir(encodeURI(untetewed))
- if (unjiired != false) {
- await page.close()
-
- return {
- url: unjiired.url
- }
- }
-
- // eue
- const uneue = await this.eueSiherp(encodeURI(untetewed))
- if (uneue != false) {
- await page.close()
-
- return {
- url: uneue.url
- }
- }
-
- await page.goto(untetewed, {
- timeout: 30000
- })
- try {
- await page.waitForSelector('div.download-link')
- const div2 = await page.$('div.download-link')
- const untetewed2 = await div2.$eval('a', node => node.href)
- await page.goto(untetewed2, {
- timeout: 30000
- })
- final = page.url()
- await page.close()
- } catch (e) {
- console.log(e)
- await page.close()
- return this.parseTetewBase64UrlParam(untetewed)
- }
-
- return {url: final}
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Parse njiir and get the original download link.
- * @param link njiir url.
- */
- async njiir(link) {
- const page = await this.browser.newOptimizedPage()
-
- try {
- let downloadLink, anchor
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 30000
- })
-
- await page.waitForSelector('div.result > a')
- await Util.sleep(8000)
- anchor = await page.$('div.result > a')
- downloadLink = await this.browser.getPlainProperty(anchor, 'href')
- if (downloadLink == 'javascript:' || downloadLink.includes('javascript') == true) {
- await anchor.click()
- }
- await Util.sleep(5000)
- anchor = await page.$('div.result > a')
- downloadLink = await this.browser.getPlainProperty(anchor, 'href')
- await page.close()
-
- return {url: downloadLink}
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- async eueSiherp(link) {
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- await page.goto(link, {
- timeout: 30000
- })
-
- await page.waitForSelector('button#download2')
- await page.click('button#download2')
- await Util.sleep(7000)
- await page.waitForSelector('button#download')
- await Promise.all([
- page.waitForNavigation({
- timeout: 0,
- waitUntil: 'networkidle2'
- }),
- page.click('button#download')
- ])
- const final = page.url()
- await page.close()
-
- return {url: final}
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- /**
- * Get new tab page instance.
- * @param page current page.
- * @param browser current browser.
- */
- async newPagePromise(page, browser) {
- const pageTarget = page.target()
- const newTarget = await browser.waitForTarget(target => target.opener() === pageTarget)
- const newPage = await newTarget.page()
-
- return newPage
- }
-
- //anjay.info
- async anjay(link) {
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- if (link.includes('ahexa.')) {
- return this.tetew(link, true)
- }
-
- await page.goto(link, {
- timeout: 30000
- })
-
- await Util.sleep(13000)
- await page.waitForSelector('div.to > a')
- await page.click('div.to > a')
- await page.waitForSelector('#showlink')
- await page.click('#showlink')
-
- const newPage = await this.newPagePromise(page, this.browser.browser)
- const url = newPage.url()
-
- await page.close()
- await newPage.close()
-
- const final = this.tetew(url, true)
-
- return final
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-}
-
-module.exports = Samehadaku
\ No newline at end of file
diff --git a/services/SamehadakuEas.js b/services/SamehadakuEas.js
deleted file mode 100644
index 159b614..0000000
--- a/services/SamehadakuEas.js
+++ /dev/null
@@ -1,102 +0,0 @@
-const Util = require('../utils/utils')
-const Handler = require('../exceptions/Handler')
-const { samehadaku_url } = require('../config.json')
-
-class Samehadaku {
- constructor(browser) {
- this.browser = browser
- }
-
- async checkOnGoingPage(navPage = 1) {
- const anime = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- await page.goto(`${samehadaku_url}/page/${navPage}/`, {
- timeout: 300000
- })
-
- await page.waitForSelector('div.white.updateanime')
-
- const posts = await page.$$('div.white.updateanime > ul > li')
- await Util.asyncForEach(posts, async (post) => {
- const titleHeader = await post.$('h2.entry-title')
- const { title, link } = await titleHeader.$eval('a', node => ({
- title: node.innerText,
- link: node.href
- }))
-
- const parsedTitle = title.split(' Episode')[0]
- const matches = link.match(/(?<=episode-)(\d+)/)
- if (matches && matches != null) {
- const numeral = matches[0].length == 1 ? '0' + matches[0] : matches[0]
-
- anime.push({
- episode: numeral,
- title: parsedTitle,
- link: link
- })
- }
- })
-
- await page.close()
-
- return anime
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-
- async getEpisodes(link) {
- const episodes = []
- const page = await this.browser.newOptimizedPage()
-
- try {
- link = decodeURIComponent(link)
- link = link.replace('/category/', '/anime/')
- await page.goto(link, {
- timeout: 300000
- })
- await page.waitForSelector('div.episodelist')
- const episodeList = await page.$$('div.episodelist > ul > li')
- await Util.asyncForEach(episodeList, async (item) => {
- const anchor = await item.$('span.lefttitle > a')
- const title = await this.browser.getPlainProperty(anchor, 'innerText')
- const link = await this.browser.getPlainProperty(anchor, 'href')
-
- if (!link.match(/(opening)/) && !link.match(/(ending)/)) {
- const episodeMatches = link.match(/(?<=episode-)(\d+)/)
- const ovaMatches = link.match(/-ova/)
- const ovaMatches2 = link.match(/ova-/)
-
- if (episodeMatches && episodeMatches != null) {
- const numeral = episodeMatches[0].length == 1 ? '0' + episodeMatches[0] : episodeMatches[0]
-
- episodes.push({
- episode: numeral,
- title: title,
- link: link
- })
- } else if ((ovaMatches && ovaMatches != null) || (ovaMatches2 && ovaMatches2 != null)) {
- episodes.push({
- episode: `${title}`,
- title: title,
- link: link
- })
- }
- }
- })
- await page.close()
-
- return episodes
- } catch (error) {
- await page.close()
-
- return Handler.error(error)
- }
- }
-}
-
-module.exports = Samehadaku
\ No newline at end of file
diff --git a/shortlinks/Ahexa.js b/shortlinks/Ahexa.js
new file mode 100644
index 0000000..07ac1c2
--- /dev/null
+++ b/shortlinks/Ahexa.js
@@ -0,0 +1,41 @@
+const Browser = require('../Browser')
+const Handler = require('../exceptions/Handler')
+const Util = require('../utils/utils')
+
+class Ahexa {
+ constructor() {
+ this.marker = 'ahexa.com'
+ }
+
+ async parse(link) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await page.waitForSelector('div.download-link')
+ const div = await page.$('div.download-link')
+ const raw = await div.$eval('a', node => node.href)
+
+ await page.close()
+
+ const queries = Util.getAllUrlParams(raw)
+ if (queries.r) {
+ return {
+ url: Util.base64Decode(queries.r)
+ }
+ }
+
+ return {
+ url: raw
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Ahexa
\ No newline at end of file
diff --git a/shortlinks/Anjay.js b/shortlinks/Anjay.js
new file mode 100644
index 0000000..f4df8db
--- /dev/null
+++ b/shortlinks/Anjay.js
@@ -0,0 +1,63 @@
+const Browser = require('../Browser')
+const Handler = require('../exceptions/Handler')
+const Util = require('../utils/utils')
+
+class Anjay {
+ constructor() {
+ this.marker = 'anjay.info'
+ }
+
+ async parse(link) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await Util.sleep(13000)
+ await page.waitForSelector('div.to > a')
+ await page.click('div.to > a')
+ await page.waitForSelector('#showlink')
+ await page.click('#showlink')
+
+ const newPage = await Browser.getNewTabPage(page)
+ await Util.sleep(2000)
+            const final = await this.parseAxeha(newPage)
+
+ await page.close()
+
+ return final
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ async parseAxeha(page) {
+ try {
+ await page.waitForSelector('div.download-link')
+ const div = await page.$('div.download-link')
+ const raw = await div.$eval('a', node => node.href)
+
+ await page.close()
+
+ const queries = Util.getAllUrlParams(raw)
+ if (queries.r) {
+ return {
+ url: Util.base64Decode(queries.r)
+ }
+ }
+
+ return {
+ url: raw
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Anjay
\ No newline at end of file
diff --git a/shortlinks/Hexa.js b/shortlinks/Hexa.js
new file mode 100644
index 0000000..f44e6f7
--- /dev/null
+++ b/shortlinks/Hexa.js
@@ -0,0 +1,33 @@
+const Browser = require('../Browser')
+const Handler = require('../exceptions/Handler')
+const Util = require('../utils/utils')
+
+class Hexa {
+ constructor() {
+ this.marker = 'hexafile.net'
+ }
+
+ async parse(link) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await Util.sleep(7000)
+ const anchor = await page.$('center.link-content > a')
+ const url = await Browser.getPlainProperty(anchor, 'href')
+ await page.close()
+
+ return {
+ url: url
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Hexa
\ No newline at end of file
diff --git a/shortlinks/Hightech.js b/shortlinks/Hightech.js
new file mode 100644
index 0000000..84c86d5
--- /dev/null
+++ b/shortlinks/Hightech.js
@@ -0,0 +1,49 @@
+const Browser = require('../Browser')
+const Handler = require('../exceptions/Handler')
+const Util = require('../utils/utils')
+
+class Hightech {
+ constructor() {
+ this.marker = 'hightech'
+ }
+
+ async parse(link) {
+ link = decodeURIComponent(link)
+ const params = Util.getAllUrlParams(link)
+ if (params.sitex) {
+ return {
+ url: Util.base64Decode(params.sitex)
+ }
+ }
+
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ await page.goto(link)
+
+ await Util.sleep(8000)
+ await page.waitForSelector('a[href="#generate"]')
+ await page.click('a[href="#generate"]')
+ await page.waitForSelector('a#link-download')
+ await Util.sleep(4000)
+ await page.click('a#link-download')
+
+ const newPage = await Browser.getNewTabPage(page)
+ await Util.sleep(2000)
+ const url = newPage.url()
+
+ await page.close()
+ await newPage.close()
+
+ return {
+ url: url
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Hightech
\ No newline at end of file
diff --git a/shortlinks/Jelajahinternet.js b/shortlinks/Jelajahinternet.js
new file mode 100644
index 0000000..fc1cb5f
--- /dev/null
+++ b/shortlinks/Jelajahinternet.js
@@ -0,0 +1,18 @@
+const Util = require('../utils/utils')
+
+class Jelajahinternet {
+ constructor() {
+ this.marker = 'jelajahinternet'
+ }
+
+ async parse(link) {
+ link = decodeURIComponent(link)
+ const params = Util.getAllUrlParams(link)
+
+ return {
+ url: decodeURIComponent(params.url)
+ }
+ }
+}
+
+module.exports = new Jelajahinternet
\ No newline at end of file
diff --git a/shortlinks/Kepoow.js b/shortlinks/Kepoow.js
new file mode 100644
index 0000000..c438081
--- /dev/null
+++ b/shortlinks/Kepoow.js
@@ -0,0 +1,18 @@
+const Util = require('../utils/utils')
+
+class Kepoow {
+ constructor() {
+ this.marker = 'kepoow'
+ }
+
+ async parse(link) {
+ link = decodeURIComponent(link)
+ const params = Util.getAllUrlParams(link)
+
+ return {
+ url: decodeURIComponent(Util.base64Decode(params.r))
+ }
+ }
+}
+
+module.exports = new Kepoow
\ No newline at end of file
diff --git a/shortlinks/Kontenajaib.js b/shortlinks/Kontenajaib.js
new file mode 100644
index 0000000..37d04fa
--- /dev/null
+++ b/shortlinks/Kontenajaib.js
@@ -0,0 +1,56 @@
+const Browser = require('../Browser')
+const Handler = require('../exceptions/Handler')
+const Util = require('../utils/utils')
+
+class Kontenajaib {
+ constructor() {
+ this.marker = 'kontenajaib'
+ }
+
+ async parse(link) {
+ const page = await Browser.newOptimizedPage()
+ let newPage = null, finalPage = null
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await Util.sleep(9500)
+ await page.click('#generater')
+ await Util.sleep(9500)
+ await page.click('#showlink')
+
+            newPage = await Browser.getNewTabPage(page)
+ await Util.sleep(9500)
+ await newPage.click('#generater')
+ await Util.sleep(9500)
+ await newPage.click('#showlink')
+
+            finalPage = await Browser.getNewTabPage(newPage)
+ await Util.sleep(2000)
+ const url = finalPage.url()
+
+ await page.close()
+ await newPage.close()
+ await finalPage.close()
+
+ return {
+ url: url
+ }
+ } catch (error) {
+ await page.close()
+
+ if (newPage) {
+ await newPage.close()
+ }
+
+ if (finalPage) {
+ await finalPage.close()
+ }
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Kontenajaib
\ No newline at end of file
diff --git a/shortlinks/Semawur.js b/shortlinks/Semawur.js
new file mode 100644
index 0000000..c8fa94b
--- /dev/null
+++ b/shortlinks/Semawur.js
@@ -0,0 +1,58 @@
+const Browser = require('../Browser')
+const Handler = require('../exceptions/Handler')
+const Util = require('../utils/utils')
+
+class Semawur {
+ constructor() {
+ this.marker = 'semawur'
+ }
+
+ async parse(link) {
+ link = decodeURIComponent(link)
+ const params = Util.getAllUrlParams(link)
+
+ if (Object.entries(params).length > 0 && params.url) {
+ return {
+ url: decodeURIComponent(params.url).replace(/\++/g, ' ')
+ }
+ }
+
+ const page = await Browser.newPageWithNewContext()
+
+ try {
+ await page.goto(link)
+
+ await page.waitForSelector('#link-view > button')
+ await Promise.all([
+ page.waitForNavigation({
+ timeout: 0,
+ waitUntil: 'networkidle2'
+ }),
+ page.click('#link-view > button')
+ ])
+ await page.waitForSelector('a.get-link')
+ await Util.sleep(5000)
+ const downloadButton = await page.$('a.get-link')
+
+ let classProp = await Browser.getPlainProperty(downloadButton, 'className')
+ do {
+ await Util.sleep(5000)
+ classProp = await Browser.getPlainProperty(downloadButton, 'className')
+ } while (classProp !== 'get-link')
+
+ const downloadLinks = await Browser.getPlainProperty(downloadButton, 'href')
+
+ await Browser.closePage(page)
+
+ return {
+ url: downloadLinks
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Semawur
\ No newline at end of file
diff --git a/shortlinks/Sukakesehattan.js b/shortlinks/Sukakesehattan.js
new file mode 100644
index 0000000..3d5be69
--- /dev/null
+++ b/shortlinks/Sukakesehattan.js
@@ -0,0 +1,18 @@
+const Util = require('../utils/utils')
+
+class Sukakesehattan {
+ constructor() {
+ this.marker = 'sukakesehattan'
+ }
+
+ async parse(link) {
+ link = decodeURIComponent(link)
+ const params = Util.getAllUrlParams(link)
+
+ return {
+ url: decodeURIComponent(params.url)
+ }
+ }
+}
+
+module.exports = new Sukakesehattan
\ No newline at end of file
diff --git a/shortlinks/Teknoku.js b/shortlinks/Teknoku.js
new file mode 100644
index 0000000..9352d0a
--- /dev/null
+++ b/shortlinks/Teknoku.js
@@ -0,0 +1,42 @@
+const Browser = require('../Browser')
+const Handler = require('../exceptions/Handler')
+
+class Teknoku {
+ constructor() {
+ this.marker = 'teknoku'
+ }
+
+ async parse(link) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await Promise.all([
+ page.waitForNavigation({
+ waitUntil: 'domcontentloaded'
+ }),
+ page.$eval('#srl > form', form => form.submit()),
+ ])
+
+ const fullContent = await page.content()
+ await page.close()
+
+ // eslint-disable-next-line quotes
+ let splitted = fullContent.split("function changeLink(){var a='")
+ splitted = splitted[1].split(';window.open')
+ splitted = splitted[0].replace(/(['"])+/g, '')
+
+ return {
+ url: splitted
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Teknoku
\ No newline at end of file
diff --git a/shortlinks/Travellinginfos.js b/shortlinks/Travellinginfos.js
new file mode 100644
index 0000000..a0e9596
--- /dev/null
+++ b/shortlinks/Travellinginfos.js
@@ -0,0 +1,17 @@
+const Util = require('../utils/utils')
+
+class Travellinginfos {
+ constructor() {
+ this.marker = 'travellinginfos'
+ }
+
+ async parse(link) {
+ link = decodeURIComponent(link)
+
+ const params = Util.getAllUrlParams(link)
+
+        return { url: Util.base64Decode(params.r) }
+ }
+}
+
+module.exports = new Travellinginfos
\ No newline at end of file
diff --git a/shortlinks/Xmaster.js b/shortlinks/Xmaster.js
new file mode 100644
index 0000000..f8d1bd6
--- /dev/null
+++ b/shortlinks/Xmaster.js
@@ -0,0 +1,30 @@
+const Util = require('../utils/utils')
+const Handler = require('../exceptions/Handler')
+
+class Xmaster {
+ constructor() {
+ this.marker = 'xmaster.xyz'
+ }
+
+ async parse(link) {
+ link = decodeURIComponent(link)
+ const params = Util.getAllUrlParams(link)
+ let url = null
+
+ if (params.sitex) {
+ url = Util.base64Decode(params.sitex)
+ }
+
+ if (params.xyzkl) {
+ url = Util.base64Decode(params.xyzkl)
+ }
+
+ if (!url) {
+            return Handler.error('Error Xmaster: no parameter.')
+ }
+
+ return {url: url}
+ }
+}
+
+module.exports = new Xmaster
\ No newline at end of file
diff --git a/shortlinks/deprecated/Euesiherp.js b/shortlinks/deprecated/Euesiherp.js
new file mode 100644
index 0000000..f86b32c
--- /dev/null
+++ b/shortlinks/deprecated/Euesiherp.js
@@ -0,0 +1,42 @@
+const Browser = require('../../Browser')
+const Handler = require('../../exceptions/Handler')
+const Util = require('../../utils/utils')
+
+class Euesiherp {
+ constructor() {
+ this.marker = 'euesiherp'
+ }
+
+ async parse(link) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await page.waitForSelector('button#download2')
+ await page.click('button#download2')
+ await Util.sleep(7000)
+ await page.waitForSelector('button#download')
+ await Promise.all([
+ page.waitForNavigation({
+ timeout: 0,
+ waitUntil: 'networkidle2'
+ }),
+ page.click('button#download')
+ ])
+ const final = page.url()
+ await page.close()
+
+ return {
+ url: final
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Euesiherp
\ No newline at end of file
diff --git a/shortlinks/deprecated/Njiir.js b/shortlinks/deprecated/Njiir.js
new file mode 100644
index 0000000..ab82cae
--- /dev/null
+++ b/shortlinks/deprecated/Njiir.js
@@ -0,0 +1,41 @@
+const Browser = require('../../Browser')
+const Handler = require('../../exceptions/Handler')
+const Util = require('../../utils/utils')
+
+class Njiir {
+ constructor() {
+ this.marker = 'njiir'
+ }
+
+ async parse(link) {
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ let downloadLink, anchor
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await page.waitForSelector('div.result > a')
+ await Util.sleep(8000)
+ anchor = await page.$('div.result > a')
+ downloadLink = await Browser.getPlainProperty(anchor, 'href')
+ if (downloadLink == 'javascript:' || downloadLink.includes('javascript') == true) {
+ await anchor.click()
+ }
+ await Util.sleep(5000)
+ anchor = await page.$('div.result > a')
+ downloadLink = await Browser.getPlainProperty(anchor, 'href')
+ await page.close()
+
+ return {
+ url: downloadLink
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+}
+
+module.exports = new Njiir
\ No newline at end of file
diff --git a/shortlinks/deprecated/Tetew.js b/shortlinks/deprecated/Tetew.js
new file mode 100644
index 0000000..a7afb35
--- /dev/null
+++ b/shortlinks/deprecated/Tetew.js
@@ -0,0 +1,80 @@
+const Browser = require('../../Browser')
+const Handler = require('../../exceptions/Handler')
+const Util = require('../../utils/utils')
+
+class Tetew {
+ async parse(link, skip = false) {
+ let final
+ const page = await Browser.newOptimizedPage()
+
+ try {
+ link = decodeURIComponent(link)
+ await page.goto(link)
+
+ await page.waitForSelector('div.download-link')
+ const div = await page.$('div.download-link')
+ const untetewed = await div.$eval('a', node => node.href)
+
+ if (skip) {
+ await page.close()
+ return this.parseTetewBase64UrlParam(untetewed)
+ }
+
+ // njiir
+ const unjiired = await this.njiir(encodeURI(untetewed))
+ if (unjiired != false) {
+ await page.close()
+
+ return {
+ url: unjiired.url
+ }
+ }
+
+ // eue
+ const uneue = await this.eueSiherp(encodeURI(untetewed))
+ if (uneue != false) {
+ await page.close()
+
+ return {
+ url: uneue.url
+ }
+ }
+
+ await page.goto(untetewed)
+ try {
+ await page.waitForSelector('div.download-link')
+ const div2 = await page.$('div.download-link')
+ const untetewed2 = await div2.$eval('a', node => node.href)
+ await page.goto(untetewed2)
+ final = page.url()
+ await page.close()
+ } catch (e) {
+ console.log(e)
+ await page.close()
+ return this.parseTetewBase64UrlParam(untetewed)
+ }
+
+ return {
+ url: final
+ }
+ } catch (error) {
+ await page.close()
+
+ return Handler.error(error)
+ }
+ }
+
+ async parseTetewBase64UrlParam(untetewed) {
+ const queries = Util.getAllUrlParams(untetewed)
+ if (queries.r) {
+ return {
+ url: Util.base64Decode(queries.r)
+ }
+ }
+ return {
+ url: untetewed
+ }
+ }
+}
+
+module.exports = new Tetew
\ No newline at end of file
diff --git a/shortlinks/index.js b/shortlinks/index.js
new file mode 100644
index 0000000..e5afdb7
--- /dev/null
+++ b/shortlinks/index.js
@@ -0,0 +1,51 @@
+const Ahexa = require('./Ahexa')
+const Anjay = require('./Anjay')
+const Hexa = require('./Hexa')
+const Hightech = require('./Hightech')
+const Jelajahinternet = require('./Jelajahinternet')
+const Kepoow = require('./Kepoow')
+const Kontenajaib = require('./Kontenajaib')
+const Semawur = require('./Semawur')
+const Sukakesehattan = require('./Sukakesehattan')
+const Teknoku = require('./Teknoku')
+const Travellinginfos = require('./Travellinginfos')
+const Xmaster = require('./Xmaster')
+const Handler = require('../exceptions/Handler')
+
+class Shortlink {
+ constructor() {
+ this.shorterners = [
+ Ahexa,
+ Anjay,
+ Hexa,
+ Hightech,
+ Jelajahinternet,
+ Kepoow,
+ Kontenajaib,
+ Semawur,
+ Sukakesehattan,
+ Teknoku,
+ Travellinginfos,
+ Xmaster
+ ]
+ }
+
+ async parse(link) {
+ let shorterner = null
+
+ for (const i of this.shorterners) {
+ if (link.includes(i.marker)) {
+ shorterner = i
+ break
+ }
+ }
+
+ if (!shorterner) {
+ return Handler.error('Error: Unknown shortlink.')
+ }
+
+ return await shorterner.parse(link)
+ }
+}
+
+module.exports = new Shortlink
\ No newline at end of file
diff --git a/test/integration/index.js b/test/integration/index.js
deleted file mode 100644
index 159a5bf..0000000
--- a/test/integration/index.js
+++ /dev/null
@@ -1,17 +0,0 @@
-/* eslint-disable no-undef */
-const supertest = require('supertest'),
- app = require('../../index.js')
-
-describe('home page', () => {
- before(function (done) {
- this.timeout(5000)
- setTimeout(done, 3000)
- })
-
- it('should return 200', (done) => {
- supertest(app)
- .get('/')
- .expect(200)
- .end(done)
- })
-})
\ No newline at end of file
diff --git a/test/integration/kiryuu.js b/test/integration/kiryuu.js
deleted file mode 100644
index 2cb7755..0000000
--- a/test/integration/kiryuu.js
+++ /dev/null
@@ -1,50 +0,0 @@
-/* eslint-disable no-undef */
-const supertest = require('supertest'),
- app = require('../../index.js')
-
-describe('kiryuu', function () {
- describe('manga list', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/kiryuu/mangaList')
- .expect(200)
- .end(done)
- })
- })
-
- describe('manga info', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/kiryuu/mangaInfo?link=%2Fmanga%2Firon-ladies%2F')
- .expect(200)
- .end(done)
- })
- })
-
- describe('manga chapters', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/kiryuu/chapters?link=%2Fmanga%2Firon-ladies')
- .expect(200)
- .end(done)
- })
- })
-
- describe('chapter images', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/kiryuu/images?link=%2Firon-ladies-chapter-99-bahasa-indonesia%2F')
- .expect(200)
- .end(done)
- })
- })
-
- describe('new releases', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/kiryuu/newReleases')
- .expect(200)
- .end(done)
- })
- })
-})
\ No newline at end of file
diff --git a/test/integration/kiryuu.test.js b/test/integration/kiryuu.test.js
new file mode 100644
index 0000000..08bbb7e
--- /dev/null
+++ b/test/integration/kiryuu.test.js
@@ -0,0 +1,145 @@
+/* eslint-disable no-undef */
+const supertest = require('supertest'),
+ expect = require('chai').expect,
+ app = require('../../index.js')
+
+describe('kiryuu', function () {
+ before(function (done) {
+ this.timeout(5000)
+ setTimeout(done, 3000)
+ })
+
+ describe('manga list', function () {
+ it('should return 200', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kiryuu/mangaList')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ expect(res.body.data).to.not.be.empty
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('manga info', function () {
+ it('should return 200', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kiryuu/mangaInfo?link=%2Fmanga%2Firon-ladies%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('title')
+ expect(res.body.data).to.has.property('cover')
+ expect(res.body.data).to.has.property('alternate_title')
+ expect(res.body.data).to.has.property('synopsis')
+ expect(res.body.data).to.has.property('author')
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('manga chapters', function () {
+ it('should return 200', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kiryuu/chapters?link=%2Fmanga%2Firon-ladies')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('chapter')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('chapter images', function () {
+ it('should return 200', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kiryuu/images?link=%2Firon-ladies-chapter-99-bahasa-indonesia%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('chapter')
+ expect(res.body.data).to.has.property('images')
+ res.body.data.images.forEach(image => {
+ expect(image).to.be.an('object')
+ expect(image).to.has.property('index')
+ expect(image).to.has.property('url')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return 200', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kiryuu/newReleases')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('title_url')
+ expect(item).to.has.property('raw_title_url')
+ expect(item).to.has.property('chapter')
+ expect(item).to.has.property('chapter_url')
+ expect(item).to.has.property('raw_chapter_url')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/integration/kusonime.test.js b/test/integration/kusonime.test.js
new file mode 100644
index 0000000..733edfe
--- /dev/null
+++ b/test/integration/kusonime.test.js
@@ -0,0 +1,89 @@
+/* eslint-disable no-undef */
+const supertest = require('supertest'),
+ expect = require('chai').expect,
+ app = require('../../index.js')
+
+describe('kusonime', function () {
+ before(function (done) {
+ this.timeout(5000)
+ setTimeout(done, 3000)
+ })
+
+ describe('anime list', function () {
+ it('should return 200 and an array of anime list which has title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kusonime/animeList')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return 200 and an array of download links which has quality, host, and link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kusonime/links?link=%2Ftiger-mask-w-batch-subtitle-indonesia%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return 200 and an array of episodes which has episode, title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/kusonime/newReleases')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/integration/moenime.js b/test/integration/moenime.js
deleted file mode 100644
index 7eb8ea2..0000000
--- a/test/integration/moenime.js
+++ /dev/null
@@ -1,70 +0,0 @@
-/* eslint-disable no-undef */
-const supertest = require('supertest'),
- app = require('../../index.js')
-
-describe('moenime', function () {
- before(function (done) {
- this.timeout(5000)
- setTimeout(done, 3000)
- })
-
- describe('anime list', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/moenime/animeList')
- .expect(200)
- .end(function(err, res) {
- if (err) {
- console.log(res.body)
- return done(err)
- }
- done()
- })
- })
- })
-
- describe('episodes', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/moenime/episodes?link=/kandagawa-jet-girls-sub-indo/')
- .expect(200)
- .end(function(err, res) {
- if (err) {
- console.log(res.body)
- return done(err)
- }
- done()
- })
- })
- })
-
- describe('new releases', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/moenime/newReleases')
- .expect(200)
- .end(function(err, res) {
- if (err) {
- console.log(res.body)
- return done(err)
- }
- done()
- })
- })
- })
-
- describe('teknoku', function () {
- it('should return 200', function (done) {
- this.timeout(60000)
- supertest(app).get('/api/moenime/teknoku?link=https%3A%2F%2Fteknoku.me%2F%3Fid%3DcWFkTnBBZlEvZ1NvUHdYUGNkQ1ZPeGNnb0pjK2s1VDJWY2dlakh2Ykwrbjk0VkRUVGR2bWZwSHNpbVFVZUdhSjNTYUhySnBsS05jN2NmUHMzTk1BMWc9PQ%3D%3D')
- .expect(200)
- .end(function (err, res) {
- if (err) {
- console.log(res.body)
- return done(err)
- }
- done()
- })
- })
- })
-})
\ No newline at end of file
diff --git a/test/integration/moenime.test.js b/test/integration/moenime.test.js
new file mode 100644
index 0000000..3695a0b
--- /dev/null
+++ b/test/integration/moenime.test.js
@@ -0,0 +1,92 @@
+/* eslint-disable no-undef */
+const supertest = require('supertest'),
+ expect = require('chai').expect,
+ app = require('../../index.js')
+
+describe('moenime', function () {
+ before(function (done) {
+ this.timeout(5000)
+ setTimeout(done, 3000)
+ })
+
+ describe('anime list', function () {
+ it('should return 200 and an array of anime list which has title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/moenime/animeList')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function(err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return 200 and episode objects with an array of download links which has quality, host, and link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/moenime/links?link=%2Fabsolute-duo-sub-indo%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('episode_01')
+ expect(res.body.data.episode_01).to.be.an('array')
+ res.body.data.episode_01.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ .end(function(err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return 200 and an array of episodes which has episode, title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/moenime/newReleases')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function(err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/integration/neonime.test.js b/test/integration/neonime.test.js
new file mode 100644
index 0000000..8232c21
--- /dev/null
+++ b/test/integration/neonime.test.js
@@ -0,0 +1,116 @@
+/* eslint-disable no-undef */
+const supertest = require('supertest'),
+ expect = require('chai').expect,
+ app = require('../../index.js')
+
+describe('neonime', function () {
+ before(function (done) {
+ this.timeout(5000)
+ setTimeout(done, 3000)
+ })
+
+ describe('episodes', function () {
+ it('should return 200 and an array of episodes which has episode, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/neonime/episodes?link=%2Ftvshows%2Fa-i-c-o-incarnation-subtitle-indonesia%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return 200 and an array of download links which has quality, host, and link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/neonime/links?link=%2Fepisode%2Fa-i-c-o-incarnation-1x12')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('batch links', function () {
+ it('should return 200 and an array of download links which has quality, host, and link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/neonime/links?link=%2Fbatch%2Fakame-ga-kill-bd-batch-subtitle-indonesia%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return 200 and an array of episodes which has episode, title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/neonime/newReleases')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/integration/oploverz.test.js b/test/integration/oploverz.test.js
new file mode 100644
index 0000000..18dd4e5
--- /dev/null
+++ b/test/integration/oploverz.test.js
@@ -0,0 +1,117 @@
+/* eslint-disable no-undef */
+const supertest = require('supertest'),
+ expect = require('chai').expect,
+ app = require('../../index.js')
+
+describe('oploverz', function () {
+ before(function (done) {
+ this.skip()
+ this.timeout(5000)
+ setTimeout(done, 3000)
+ })
+
+ describe('anime list', function () {
+ it('should return 200 and an array of anime list which has title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/oploverz/animeList')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('episodes', function () {
+ it('should return 200 and an array of episodes which has episode, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/oploverz/episodes?link=%2Fseries%2F3d-kanojo-real-girl-s2%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return 200 and an array of download links which has quality, host, and link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/oploverz/links?link=%2F3d-kanojo-real-girl-s2-12-subtitle-indonesia-end%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return 200 and an array of episodes which has episode, title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/oploverz/newReleases')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/integration/samehadaku.test.js b/test/integration/samehadaku.test.js
new file mode 100644
index 0000000..1068b61
--- /dev/null
+++ b/test/integration/samehadaku.test.js
@@ -0,0 +1,116 @@
+/* eslint-disable no-undef */
+const supertest = require('supertest'),
+ expect = require('chai').expect,
+ app = require('../../index.js')
+
+describe('samehadaku', function () {
+ before(function (done) {
+ this.timeout(5000)
+ setTimeout(done, 3000)
+ })
+
+ describe('anime list', function () {
+ it('should return 200 and an array of anime list which has title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/samehadaku/animeList')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('episodes', function () {
+ it('should return 200 and an array of episodes which has episode, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/samehadaku/episodes?link=%2Fanime%2Fgegege-no-kitarou-2018%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return 200 and an array of download links which has quality, host, and link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/samehadaku/links?link=%2Fgegege-no-kitarou-episode-87%2F')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return 200 and an array of episodes which has episode, title, link, and raw link', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/samehadaku/newReleases')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('array')
+ res.body.data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/integration/shortlink.test.js b/test/integration/shortlink.test.js
new file mode 100644
index 0000000..134051c
--- /dev/null
+++ b/test/integration/shortlink.test.js
@@ -0,0 +1,149 @@
+/* eslint-disable no-undef */
+const supertest = require('supertest'),
+ expect = require('chai').expect,
+ app = require('../../index.js')
+
+describe('shortlink', function () {
+ before(function (done) {
+ this.timeout(5000)
+ setTimeout(done, 3000)
+ })
+
+ describe('teknoku', function () {
+ it('should return 200 and a string of url', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/shortlink?link=https%3A%2F%2Fteknoku.me%2F%3Fid%3DcWFkTnBBZlEvZ1NvUHdYUGNkQ1ZPeGNnb0pjK2s1VDJWY2dlakh2Ykwrbjk0VkRUVGR2bWZwSHNpbVFVZUdhSjNTYUhySnBsS05jN2NmUHMzTk1BMWc9PQ%3D%3D')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('url')
+ expect(res.body.data.url).to.be.a('string')
+ expect(res.body.data.url).to.equal('https://www60.zippyshare.com/v/dpn65heR/file.html')
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('jelajahinternet', function () {
+ it('should return 200 and a string of url', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/shortlink?link=https%3A%2F%2Fjelajahinternet.me%2Ffull%2F%3Fapi%3Da43e9781fc804e34814e29bf4c2bb518989da6ad%26url%3Dhttps%253A%252F%252Facefile.co%252Ff%252F16742192%252Fkusonime-topeng-macan-w-001-020-360p-rar%26type%3D2')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('url')
+ expect(res.body.data.url).to.be.a('string')
+ expect(res.body.data.url).to.equal('https://acefile.co/f/16742192/kusonime-topeng-macan-w-001-020-360p-rar')
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('xmaster', function () {
+ it('should return 200 and a string of url', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/shortlink?link=https%3A%2F%2Fxmaster.xyz%2F%3Fsitex%3DaHR0cHM6Ly9zZW5kaXQuY2xvdWQvN24zNXZlcGNibXpq')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('url')
+ expect(res.body.data.url).to.be.a('string')
+ expect(res.body.data.url).to.equal('https://sendit.cloud/7n35vepcbmzj')
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('kontenajaib', function () {
+ it('should return 200 and a string of url', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/shortlink?link=https%3A%2F%2Fkontenajaib.xyz%2F%3Fid%3DWWI3dG5VNlRiUUoyVUdOcEpnZ0kxL3lRck5zZlAweVFwdzBUOGpiTDdZSWhNTGtvMjNEYjVTMjdMcUFDNjl5ZUxCbWJaUlZYM2FSalhuQlorVStsMENuWjAzQ2FON05jM2Rtc05pYUQ2VTV0a01YUXJvQ1M0U1d3L0t4bzQrWFFCLzZjNDMwWnJqdlk0dXoxYndBcHdSUzNmZUthVGhRcWFWRGFrWDkyeFdEZjJMNWRWbkFNbGZLdE8xSS9admcyZUZuMGl6MDQzN0V2TGxaQmlsZGNQSjd3SkZTSFEvd291em5IektjTkFZWjRqbUdweFVDcEFFLytnUkgwNC92SXdXeWpEeHliTCtTbzQyOFZBWk9iMWE1NE5xdWVHTWNkc1I3Z2R2YmxSSnJVQ3haeHV5V0UxY2NqcVNnZFM4SlJmQXRaWGVsVS9RVXFPZmprODNkc1EzTGQ4V2NEcENHZU5pajkxSnVFNmlZPQ%3D%3D')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('url')
+ expect(res.body.data.url).to.be.a('string')
+ expect(res.body.data.url).to.include('1eoR2pnJpuygsi8nwOlcft8jL502ppMiE')
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('hexa', function () {
+ it('should return 200 and a string of url', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/shortlink?link=https%3A%2F%2Fhexafile.net%2Fu3CSw')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('url')
+ expect(res.body.data.url).to.be.a('string')
+ expect(res.body.data.url).to.equal('https://www63.zippyshare.com/v/ACM44jzR/file.html')
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+
+ describe('anjay', function () {
+ it('should return 200 and a string of url', function (done) {
+ this.timeout(60000)
+ supertest(app).get('/api/shortlink?link=https%3A%2F%2Fanjay.info%2F%3Fid%3DVWErNWlBZmpCUlMvT0pxVHE3YS84bGJVZGkrVjNwejZLTnR2UmVxRVJxell2UmdXdzA4T2tDVjBNK3gzcWk3Lw%3D%3D')
+ .expect(200)
+ .expect(function (res) {
+ expect(res.body.status).to.equal(200)
+ expect(res.body.message).to.equal('Success')
+ expect(res.body.data).to.be.an('object')
+ expect(res.body.data).to.has.property('url')
+ expect(res.body.data.url).to.be.a('string')
+ expect(res.body.data.url).to.equal('https://www32.zippyshare.com/v/IL24rZLX/file.html')
+ })
+ .end(function (err, res) {
+ if (err) {
+ console.log(res.body)
+ return done(err)
+ }
+ done()
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/unit/kiryuu.test.js b/test/unit/kiryuu.test.js
new file mode 100644
index 0000000..3cbddc2
--- /dev/null
+++ b/test/unit/kiryuu.test.js
@@ -0,0 +1,94 @@
+/* eslint-disable no-undef */
+const expect = require('chai').expect
+const Browser = require('../../Browser')
+
+describe('kiryuu', function () {
+ describe('manga list', function () {
+ it('should return an array of manga list which has title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kiryuu = require('../../fantls/Kiryuu')
+ const list = await Kiryuu.mangaList()
+
+ expect(list).to.be.an('array')
+ expect(list).to.not.be.empty
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('manga info', function () {
+ it('should return an object which has title, cover, alternate title, synopsis, and author', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kiryuu = require('../../fantls/Kiryuu')
+ const data = await Kiryuu.mangaInfo('%2Fmanga%2Firon-ladies%2F')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('title')
+ expect(data).to.has.property('cover')
+ expect(data).to.has.property('alternate_title')
+ expect(data).to.has.property('synopsis')
+ expect(data).to.has.property('author')
+ })
+ })
+
+ describe('manga chapters', function () {
+ it('should return an array of chapters which has chapter, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kiryuu = require('../../fantls/Kiryuu')
+ const data = await Kiryuu.chapters('%2Fmanga%2Firon-ladies')
+
+ expect(data).to.be.an('array')
+ data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('chapter')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('chapter images', function () {
+ it('should return an array of images which has index and url', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kiryuu = require('../../fantls/Kiryuu')
+ const data = await Kiryuu.images('%2Firon-ladies-chapter-99-bahasa-indonesia%2F')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('chapter')
+ expect(data).to.has.property('images')
+ data.images.forEach(image => {
+ expect(image).to.be.an('object')
+ expect(image).to.has.property('index')
+ expect(image).to.has.property('url')
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return an array of manga which has title, title url, raw title url, chapter, chapter url, raw chapter url', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kiryuu = require('../../fantls/Kiryuu')
+ const data = await Kiryuu.newReleases()
+
+ expect(data).to.be.an('array')
+ data.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('title_url')
+ expect(item).to.has.property('raw_title_url')
+ expect(item).to.has.property('chapter')
+ expect(item).to.has.property('chapter_url')
+ expect(item).to.has.property('raw_chapter_url')
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/unit/kusonime.test.js b/test/unit/kusonime.test.js
new file mode 100644
index 0000000..5154957
--- /dev/null
+++ b/test/unit/kusonime.test.js
@@ -0,0 +1,57 @@
+/* eslint-disable no-undef */
+const expect = require('chai').expect
+const Browser = require('../../Browser')
+
+describe('kusonime', function () {
+ describe('anime list', function () {
+ it('should return an array of anime list which has title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kusonime = require('../../fansubs/Kusonime')
+ const list = await Kusonime.animeList()
+
+ expect(list).to.be.an('array')
+ expect(list).to.not.be.empty
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return an array of download links which has quality, host, and link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kusonime = require('../../fansubs/Kusonime')
+ const links = await Kusonime.links('%2Ftiger-mask-w-batch-subtitle-indonesia%2F')
+
+ expect(links).to.be.an('array')
+ links.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return an array of episodes which has episode, title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Kusonime = require('../../fansubs/Kusonime')
+ const list = await Kusonime.newReleases()
+
+ expect(list).to.be.an('array')
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/unit/moenime.js b/test/unit/moenime.test.js
similarity index 64%
rename from test/unit/moenime.js
rename to test/unit/moenime.test.js
index cd89753..1fbefc1 100644
--- a/test/unit/moenime.js
+++ b/test/unit/moenime.test.js
@@ -1,21 +1,22 @@
/* eslint-disable no-undef */
const expect = require('chai').expect
-const Browser = require('../../services/Browser')
+const Browser = require('../../Browser')
describe('moenime', function () {
describe('anime list', function() {
- it('should return an array of anime list with title and link', async function () {
+ it('should return an array of anime list with title, link, and raw link', async function () {
this.timeout(60000)
await Browser.init()
- const Moenime = new(require('../../services/Moenime'))(Browser)
+ const Moenime = require('../../fansubs/Moenime')
const list = await Moenime.animeList()
expect(list).to.be.an('array')
expect(list).to.not.be.empty
list.forEach(anime => {
expect(anime).to.be.an('object')
- expect(anime).to.has.property('link')
expect(anime).to.has.property('title')
+ expect(anime).to.has.property('link')
+ expect(anime).to.has.property('raw_link')
})
})
})
@@ -24,7 +25,7 @@ describe('moenime', function () {
it('should return an object which has array of episodes each with its own quality, host, and link', async function () {
this.timeout(60000)
await Browser.init()
- const Moenime = new(require('../../services/Moenime'))(Browser)
+ const Moenime = require('../../fansubs/Moenime')
const episodes = await Moenime.episodes('%2Fabsolute-duo-sub-indo%2F')
expect(episodes).to.be.an('object')
@@ -32,7 +33,6 @@ describe('moenime', function () {
for (let episode in episodes) {
if (episodes.hasOwnProperty(episode)) {
expect(episodes[episode]).to.be.an('array')
- expect(episodes[episode]).to.have.lengthOf(18)
episodes[episode].forEach(file => {
expect(file).to.has.property('quality')
expect(file).to.has.property('host')
@@ -44,10 +44,10 @@ describe('moenime', function () {
})
describe('new releases', function () {
- it('should return an array of anime list with episode, title, and link', async function () {
+ it('should return an array of anime list with episode, title, link, and raw link', async function () {
this.timeout(60000)
await Browser.init()
- const Moenime = new(require('../../services/Moenime'))(Browser)
+ const Moenime = require('../../fansubs/Moenime')
const list = await Moenime.newReleases()
expect(list).to.be.an('array')
@@ -57,21 +57,8 @@ describe('moenime', function () {
expect(anime).to.has.property('episode')
expect(anime).to.has.property('title')
expect(anime).to.has.property('link')
+ expect(anime).to.has.property('raw_link')
})
})
})
-
-
- describe('teknoku', function () {
- it('should return an object which has url property', async function () {
- this.timeout(60000)
- await Browser.init()
- const Moenime = new(require('../../services/Moenime'))(Browser)
- const teknoku = await Moenime.teknoku('https%3A%2F%2Fteknoku.me%2F%3Fid%3DcWFkTnBBZlEvZ1NvUHdYUGNkQ1ZPeGNnb0pjK2s1VDJWY2dlakh2Ykwrbjk0VkRUVGR2bWZwSHNpbVFVZUdhSjNTYUhySnBsS05jN2NmUHMzTk1BMWc9PQ%3D%3D')
-
- expect(teknoku).to.be.an('object')
- expect(teknoku).to.has.property('url')
- expect(teknoku.url).to.be.a('string')
- })
- })
})
\ No newline at end of file
diff --git a/test/unit/neonime.test.js b/test/unit/neonime.test.js
new file mode 100644
index 0000000..f916f63
--- /dev/null
+++ b/test/unit/neonime.test.js
@@ -0,0 +1,74 @@
+/* eslint-disable no-undef */
+const expect = require('chai').expect
+const Browser = require('../../Browser')
+
+describe('neonime', function () {
+ describe('episodes', function () {
+ it('should return an array of episodes which has episode, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Neonime = require('../../fansubs/Neonime')
+ const episodes = await Neonime.episodes('%2Ftvshows%2Fa-i-c-o-incarnation-subtitle-indonesia%2F')
+
+ expect(episodes).to.be.an('array')
+ episodes.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return an array of download links which has quality, host, and link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Neonime = require('../../fansubs/Neonime')
+ const links = await Neonime.links('%2Fepisode%2Fa-i-c-o-incarnation-1x12')
+
+ expect(links).to.be.an('array')
+ links.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ })
+
+ describe('batch links', function () {
+ it('should return an array of download links which has quality, host, and link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Neonime = require('../../fansubs/Neonime')
+ const links = await Neonime.links('%2Fbatch%2Fakame-ga-kill-bd-batch-subtitle-indonesia%2F')
+
+ expect(links).to.be.an('array')
+ links.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return an array of episodes which has episode, title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Neonime = require('../../fansubs/Neonime')
+ const list = await Neonime.newReleases()
+
+ expect(list).to.be.an('array')
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/unit/oploverz.test.js b/test/unit/oploverz.test.js
new file mode 100644
index 0000000..66b18b1
--- /dev/null
+++ b/test/unit/oploverz.test.js
@@ -0,0 +1,79 @@
+/* eslint-disable no-undef */
+const expect = require('chai').expect
+const Browser = require('../../Browser')
+
+describe('oploverz', function () {
+ before(function () {
+ this.skip()
+ })
+
+ describe('anime list', function () {
+ it('should return an array of anime list which has title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Oploverz = require('../../fansubs/Oploverz')
+ const list = await Oploverz.animeList()
+
+ expect(list).to.be.an('array')
+ expect(list).to.not.be.empty
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('episodes', function () {
+ it('should return an array of episodes which has episode, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Oploverz = require('../../fansubs/Oploverz')
+ const episodes = await Oploverz.episodes('%2Fanime%2Fgegege-no-kitarou-2018%2F')
+
+ expect(episodes).to.be.an('array')
+ episodes.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return an array of download links which has quality, host, and link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Oploverz = require('../../fansubs/Oploverz')
+ const links = await Oploverz.links('%2Fgegege-no-kitarou-episode-87%2F')
+
+ expect(links).to.be.an('array')
+ links.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return an array of episodes which has episode, title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Oploverz = require('../../fansubs/Oploverz')
+ const list = await Oploverz.newReleases()
+
+ expect(list).to.be.an('array')
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/unit/samehadaku.test.js b/test/unit/samehadaku.test.js
new file mode 100644
index 0000000..5be2ea8
--- /dev/null
+++ b/test/unit/samehadaku.test.js
@@ -0,0 +1,75 @@
+/* eslint-disable no-undef */
+const expect = require('chai').expect
+const Browser = require('../../Browser')
+
+describe('samehadaku', function () {
+ describe('anime list', function () {
+ it('should return an array of anime list which has title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Samehadaku = require('../../fansubs/Samehadaku')
+ const list = await Samehadaku.animeList()
+
+ expect(list).to.be.an('array')
+ expect(list).to.not.be.empty
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('episodes', function () {
+ it('should return an array of episodes which has episode, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Samehadaku = require('../../fansubs/Samehadaku')
+ const episodes = await Samehadaku.episodes('%2Fanime%2Fgegege-no-kitarou-2018%2F')
+
+ expect(episodes).to.be.an('array')
+ episodes.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+
+ describe('links', function () {
+ it('should return an array of download links which has quality, host, and link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Samehadaku = require('../../fansubs/Samehadaku')
+ const links = await Samehadaku.links('%2Fgegege-no-kitarou-episode-87%2F')
+
+ expect(links).to.be.an('array')
+ links.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('quality')
+ expect(item).to.has.property('host')
+ expect(item).to.has.property('link')
+ })
+ })
+ })
+
+ describe('new releases', function () {
+ it('should return an array of episodes which has episode, title, link, and raw link', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const Samehadaku = require('../../fansubs/Samehadaku')
+ const list = await Samehadaku.newReleases()
+
+ expect(list).to.be.an('array')
+ list.forEach(item => {
+ expect(item).to.be.an('object')
+ expect(item).to.has.property('episode')
+ expect(item).to.has.property('title')
+ expect(item).to.has.property('link')
+ expect(item).to.has.property('raw_link')
+ })
+ })
+ })
+})
\ No newline at end of file
diff --git a/test/unit/shortlink.test.js b/test/unit/shortlink.test.js
new file mode 100644
index 0000000..c856b84
--- /dev/null
+++ b/test/unit/shortlink.test.js
@@ -0,0 +1,87 @@
+/* eslint-disable no-undef */
+const expect = require('chai').expect
+const Browser = require('../../Browser')
+const Shortlink = require('../../shortlinks')
+
+describe('shortlink', function () {
+ describe('teknoku', function () {
+    it('should return an object which has a url property', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const data = await Shortlink.parse('https%3A%2F%2Fteknoku.me%2F%3Fid%3DcWFkTnBBZlEvZ1NvUHdYUGNkQ1ZPeGNnb0pjK2s1VDJWY2dlakh2Ykwrbjk0VkRUVGR2bWZwSHNpbVFVZUdhSjNTYUhySnBsS05jN2NmUHMzTk1BMWc9PQ%3D%3D')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('url')
+ expect(data.url).to.be.a('string')
+ })
+ })
+
+ describe('jelajahinternet', function () {
+ it('should return an object which has a string url property', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const data = await Shortlink.parse('https%3A%2F%2Fjelajahinternet.me%2Ffull%2F%3Fapi%3Da43e9781fc804e34814e29bf4c2bb518989da6ad%26url%3Dhttps%253A%252F%252Facefile.co%252Ff%252F16742192%252Fkusonime-topeng-macan-w-001-020-360p-rar%26type%3D2')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('url')
+ expect(data.url).to.be.a('string')
+ expect(data.url).to.equal('https://acefile.co/f/16742192/kusonime-topeng-macan-w-001-020-360p-rar')
+ })
+ })
+
+ describe('xmaster', function () {
+ it('should return an object which has a string url property', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const data = await Shortlink.parse('https%3A%2F%2Fxmaster.xyz%2F%3Fsitex%3DaHR0cHM6Ly9zZW5kaXQuY2xvdWQvN24zNXZlcGNibXpq')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('url')
+ expect(data.url).to.be.a('string')
+ expect(data.url).to.equal('https://sendit.cloud/7n35vepcbmzj')
+ })
+ })
+
+ describe('kontenajaib', function () {
+ before(function () {
+ this.skip()
+ })
+
+ it('should return an object which has a string url property', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const data = await Shortlink.parse('https%3A%2F%2Fkontenajaib.xyz%2F%3Fid%3DemFXMEdBNC9HbDBMUTh0SFdiRHVQaFEyRWhKS3YzVEJSRHlrRVlEbExLZUNlSEdZaENXTW5mWllrNTliSXYrMXQ2NnhXOEZUL1BkSkpvbXAyRHg2ZE9ycVdZTlU3ejc1TUV5RXFWNkhxc3ZQQnVicW9jdTBtYk5SSjMxb2JLOTEwOFVGK1hSTks3N0txTkxOZHdsWUF4enVwVEtkQ0htUFA1LzJhNmZ1bkdkQ3RJNS9mNHhJOFlMMUdLWEtOQnlwSzE0QlVpODkvZ3RBYmZIQVpMbnVBK3IwOG5xRWFnU1FDOFBQRG55dkhKZmtWQldmM2Jtb1lvRCtmRHhMdXdNVE9DUDNzUjlWeUwrTm1FSEVPb1cvTTV3cFk0NTlCbms3NnVFYkpqRmFnSHczNWxFNllDN2E5VHVLdDdjZTU3dU0%3D')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('url')
+ expect(data.url).to.be.a('string')
+ expect(data.url).to.include('zippyshare.com')
+ })
+ })
+
+ describe('hexa', function () {
+ it('should return an object which has a string url property', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const data = await Shortlink.parse('https%3A%2F%2Fhexafile.net%2Fu3CSw')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('url')
+ expect(data.url).to.be.a('string')
+ expect(data.url).to.equal('https://www63.zippyshare.com/v/ACM44jzR/file.html')
+ })
+ })
+
+ describe('anjay', function () {
+ it('should return an object which has a string url property', async function () {
+ this.timeout(60000)
+ await Browser.init()
+ const data = await Shortlink.parse('https%3A%2F%2Fanjay.info%2F%3Fid%3DVWErNWlBZmpCUlMvT0pxVHE3YS84bGJVZGkrVjNwejZLTnR2UmVxRVJxell2UmdXdzA4T2tDVjBNK3gzcWk3Lw%3D%3D')
+
+ expect(data).to.be.an('object')
+ expect(data).to.has.property('url')
+ expect(data.url).to.be.a('string')
+ expect(data.url).to.equal('https://www32.zippyshare.com/v/IL24rZLX/file.html')
+ })
+ })
+})
\ No newline at end of file
diff --git a/utils/utils.js b/utils/utils.js
index 2303f7e..f06c460 100644
--- a/utils/utils.js
+++ b/utils/utils.js
@@ -48,6 +48,7 @@ class Util {
*/
base64Decode(string) {
const buff = Buffer.from(string, 'base64')
+
return buff.toString('ascii')
}